Compare commits
177 commits
v0.5.0-rc.
...
0.5
Author | SHA1 | Date | |
---|---|---|---|
|
32906400ec | ||
|
5c2f831c26 | ||
|
97d9251f69 | ||
|
89a2103164 | ||
|
0c159e7689 | ||
|
4f75af88b7 | ||
|
f009c957ca | ||
3569ff32cd | |||
|
c3e764ddda | ||
|
db3335bbf7 | ||
|
711b0009cb | ||
|
f88b800d51 | ||
|
28df3bcca4 | ||
|
0f74e2efa3 | ||
|
a6c1108d5b | ||
|
9535175a45 | ||
|
d9d27cf45b | ||
|
60fb68fcf3 | ||
|
78976834b2 | ||
|
52603ea43e | ||
|
0dde647042 | ||
|
3196a83b25 | ||
|
d387c27f16 | ||
|
a6846597ca | ||
|
3810a3b9ff | ||
|
52c502359b | ||
|
7d1e20da8c | ||
|
d35f34e8f0 | ||
|
9ee75ec9c9 | ||
|
919b0036e5 | ||
|
7466131f04 | ||
|
0be7dd4d0e | ||
|
f8d0bc6c4d | ||
|
381740d2ce | ||
|
a7ea8eb9c1 | ||
|
4a3619c26e | ||
|
16ab05ec72 | ||
|
36e6f16ca6 | ||
|
4843424dba | ||
|
e51bc9f9bb | ||
|
6d8731f1e5 | ||
|
49a42dc931 | ||
|
13594d6103 | ||
|
eab46e4ee5 | ||
|
7311427518 | ||
|
c94f0e55ec | ||
|
15af291f84 | ||
|
2b2d280fe0 | ||
|
0fa17a839f | ||
|
e30ec8a6cf | ||
|
f6fce8be9e | ||
|
4d3ddd50cb | ||
|
5513ef41a3 | ||
|
ac74c57709 | ||
|
5ba8c5dbb4 | ||
|
7b592bb719 | ||
|
f7d2d7cbb0 | ||
|
91a3a9b122 | ||
|
b53457c42c | ||
|
a4162cd300 | ||
|
e807c261a2 | ||
|
11a356c99a | ||
|
af30701a21 | ||
|
81ecd02df2 | ||
|
70f3bec275 | ||
|
385e36f1e4 | ||
|
f69c05a05a | ||
|
564d9de675 | ||
|
e5e2bbeeb4 | ||
|
f0aafe212d | ||
|
9b31718a0e | ||
|
083bf3badd | ||
|
43d0949a45 | ||
|
b475ff40e5 | ||
|
cb17c419d0 | ||
|
3aadebf3ea | ||
|
56579e38b2 | ||
|
4eeced440d | ||
|
60dafa0114 | ||
|
a9243b0214 | ||
|
97cc58afcf | ||
|
b5b3257cac | ||
|
15d6655889 | ||
|
80c47aa0e4 | ||
|
2c003c62aa | ||
|
b6a4d39c51 | ||
|
37a7c34084 | ||
|
dad3fad402 | ||
|
33917424a8 | ||
|
9268159dc6 | ||
|
3d662a6de3 | ||
|
bb92a06d64 | ||
|
a067fbd4bd | ||
|
e9bb21835c | ||
|
85312525f1 | ||
|
ac73a15c9d | ||
|
745828f926 | ||
|
00d4515849 | ||
|
d52a9cb615 | ||
|
4866559025 | ||
|
c5d60549c9 | ||
|
763bf2698f | ||
|
5a82f8616f | ||
|
00b470b173 | ||
|
24ad379b7c | ||
|
0ae1797ead | ||
|
be46042b51 | ||
|
4965d172be | ||
|
d8304d56a6 | ||
|
d68a1389ab | ||
|
9f93cb93d6 | ||
|
1be3bf505e | ||
|
dcd6a2a107 | ||
|
72eb48de07 | ||
|
9f3017742e | ||
|
dca495a467 | ||
|
b180bea063 | ||
|
19aa5eb52c | ||
|
f1c9cbc9fd | ||
|
1369fe990b | ||
|
f7808e452d | ||
|
15868acce0 | ||
|
a9b1fa655f | ||
|
d490c0a6f3 | ||
|
e2fe1c50b8 | ||
|
ab9124e02c | ||
|
b7444566dd | ||
|
2b0f29a2f9 | ||
|
37072eda24 | ||
|
9bc80a43db | ||
|
e53de00120 | ||
|
32d5f8c517 | ||
|
237d6e67e3 | ||
|
fde2ba1021 | ||
|
620777cacf | ||
|
09820e322c | ||
|
c9dc788056 | ||
|
00ea56745e | ||
|
397ea11ef5 | ||
|
5232abc1d5 | ||
|
4b623da2db | ||
|
699727793e | ||
|
76a78f462a | ||
|
7057211564 | ||
|
678430f96f | ||
|
f30e59e4b0 | ||
|
241e667bdc | ||
|
1640dab0c4 | ||
|
236edff6a0 | ||
|
b6f35b6209 | ||
|
b4c447a129 | ||
|
f0d04fc87c | ||
|
b1ae6aebda | ||
|
2e62d07265 | ||
|
50c7b4e542 | ||
|
8afc75d543 | ||
|
901b450a6c | ||
|
d346fe1d34 | ||
|
70d07feb70 | ||
|
92c6120d24 | ||
|
f0d19bb5e1 | ||
|
8aa41e7b73 | ||
|
14aeabeed2 | ||
|
051e062c39 | ||
|
87a45c0429 | ||
|
beaf143679 | ||
|
73a63c3664 | ||
|
441235e159 | ||
|
c7c1daab36 | ||
|
66a885b4e6 | ||
|
756b5c8257 | ||
|
d2e04f49d0 | ||
|
b3f0a3cbfb | ||
|
15df417472 | ||
|
e6773144db | ||
|
bf77b69b0b | ||
|
25260e5ab7 |
2
.env.example
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
PUBLIC_REGISTRY_URL= # url of the registry API, this must have a trailing slash and include the version
|
||||||
|
# example: https://registry.pesde.daimond113.com/v0/
|
1
.gitattributes
vendored
Normal file
|
@ -0,0 +1 @@
|
||||||
|
* text=auto
|
2
.github/FUNDING.yml
vendored
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
buy_me_a_coffee: daimond113
|
||||||
|
ko_fi: daimond113
|
79
.github/workflows/debug.yml
vendored
Normal file
|
@ -0,0 +1,79 @@
|
||||||
|
name: Debug
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
get-version:
|
||||||
|
name: Get build version
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
version: v${{ steps.get_version.outputs.value }}+rev.g${{ steps.trim_sha.outputs.trimmed_sha }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Get package version
|
||||||
|
uses: SebRollen/toml-action@v1.2.0
|
||||||
|
id: get_version
|
||||||
|
with:
|
||||||
|
file: Cargo.toml
|
||||||
|
field: package.version
|
||||||
|
|
||||||
|
- name: Trim commit SHA
|
||||||
|
id: trim_sha
|
||||||
|
run: |
|
||||||
|
commit_sha=${{ github.sha }}
|
||||||
|
echo "trimmed_sha=${commit_sha:0:7}" | tee $GITHUB_OUTPUT
|
||||||
|
build:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- job-name: windows-x86_64
|
||||||
|
target: x86_64-pc-windows-msvc
|
||||||
|
runs-on: windows-latest
|
||||||
|
artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-windows-x86_64
|
||||||
|
|
||||||
|
- job-name: linux-x86_64
|
||||||
|
target: x86_64-unknown-linux-gnu
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-linux-x86_64
|
||||||
|
|
||||||
|
- job-name: macos-x86_64
|
||||||
|
target: x86_64-apple-darwin
|
||||||
|
runs-on: macos-13
|
||||||
|
artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-macos-x86_64
|
||||||
|
|
||||||
|
- job-name: macos-aarch64
|
||||||
|
target: aarch64-apple-darwin
|
||||||
|
runs-on: macos-latest
|
||||||
|
artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-macos-aarch64
|
||||||
|
|
||||||
|
name: Build for ${{ matrix.job-name }}
|
||||||
|
runs-on: ${{ matrix.runs-on }}
|
||||||
|
needs: get-version
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install Linux build dependencies
|
||||||
|
if: ${{ matrix.runs-on == 'ubuntu-latest' }}
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install libdbus-1-dev pkg-config
|
||||||
|
|
||||||
|
- name: Install Rust toolchain
|
||||||
|
uses: dtolnay/rust-toolchain@stable
|
||||||
|
|
||||||
|
- name: Compile in debug mode
|
||||||
|
run: cargo build --bins --no-default-features --features bin,patches,wally-compat --target ${{ matrix.target }} --locked
|
||||||
|
|
||||||
|
- name: Upload artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ matrix.artifact-name }}
|
||||||
|
if-no-files-found: error
|
||||||
|
path: |
|
||||||
|
target/${{ matrix.target }}/debug/pesde.exe
|
||||||
|
target/${{ matrix.target }}/debug/pesde
|
52
.github/workflows/release.yaml
vendored
|
@ -4,8 +4,44 @@ on:
|
||||||
tags:
|
tags:
|
||||||
- v*
|
- v*
|
||||||
env:
|
env:
|
||||||
|
CRATE_NAME: pesde
|
||||||
BIN_NAME: pesde
|
BIN_NAME: pesde
|
||||||
jobs:
|
jobs:
|
||||||
|
prepare:
|
||||||
|
name: Prepare
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
version: ${{ steps.extract_version.outputs.VERSION }}
|
||||||
|
found: ${{ steps.ensure_not_published.outputs.FOUND }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Extract version
|
||||||
|
id: extract_version
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
VERSION=$(echo ${{ github.ref_name }} | cut -d'+' -f1 | cut -c 2-)
|
||||||
|
echo "VERSION=$VERSION" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Ensure not published
|
||||||
|
id: ensure_not_published
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
VERSION: ${{ steps.extract_version.outputs.VERSION }}
|
||||||
|
run: |
|
||||||
|
CRATE_NAME="${{ env.CRATE_NAME }}"
|
||||||
|
if [ ${#CRATE_NAME} -eq 1 ]; then
|
||||||
|
DIR="1"
|
||||||
|
elif [ ${#CRATE_NAME} -eq 2 ]; then
|
||||||
|
DIR="2"
|
||||||
|
elif [ ${#CRATE_NAME} -eq 3 ]; then
|
||||||
|
DIR="3/${CRATE_NAME:0:1}"
|
||||||
|
else
|
||||||
|
DIR="${CRATE_NAME:0:2}/${CRATE_NAME:2:2}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
FOUND=$(curl -sSL --fail-with-body "https://index.crates.io/$DIR/${{ env.CRATE_NAME }}" | jq -s 'any(.[]; .vers == "${{ env.VERSION }}")')
|
||||||
|
echo "FOUND=$FOUND" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
build:
|
build:
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
|
@ -31,12 +67,17 @@ jobs:
|
||||||
target: aarch64-apple-darwin
|
target: aarch64-apple-darwin
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
name: Build for ${{ matrix.host }}-${{ matrix.arch }}
|
name: Build for ${{ matrix.host }}-${{ matrix.arch }}
|
||||||
|
needs: [ prepare ]
|
||||||
|
if: ${{ needs.prepare.outputs.found == 'false' }}
|
||||||
|
env:
|
||||||
|
VERSION: ${{ needs.prepare.outputs.version }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
- uses: dtolnay/rust-toolchain@stable
|
||||||
- name: Set env
|
- name: Set env
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
ARCHIVE_NAME=${{ env.BIN_NAME }}-$(echo ${{ github.ref_name }} | cut -c 2-)-${{ matrix.host }}-${{ matrix.arch }}
|
ARCHIVE_NAME=${{ env.BIN_NAME }}-${{ env.VERSION }}-${{ matrix.host }}-${{ matrix.arch }}
|
||||||
|
|
||||||
echo "ARCHIVE_NAME=$ARCHIVE_NAME" >> $GITHUB_ENV
|
echo "ARCHIVE_NAME=$ARCHIVE_NAME" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
@ -80,6 +121,7 @@ jobs:
|
||||||
needs: [ build ]
|
needs: [ build ]
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
- uses: dtolnay/rust-toolchain@stable
|
||||||
- name: Publish
|
- name: Publish
|
||||||
run: cargo publish --token ${{ secrets.CRATES_IO_TOKEN }} --allow-dirty --locked
|
run: cargo publish --token ${{ secrets.CRATES_IO_TOKEN }} --allow-dirty --locked
|
||||||
|
|
||||||
|
@ -89,7 +131,9 @@ jobs:
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
pull-requests: read
|
pull-requests: read
|
||||||
needs: [ build, publish ]
|
needs: [ prepare, publish ]
|
||||||
|
env:
|
||||||
|
VERSION: ${{ needs.prepare.outputs.version }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
@ -105,7 +149,7 @@ jobs:
|
||||||
with:
|
with:
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
tag_name: ${{ github.ref_name }}
|
tag_name: ${{ github.ref_name }}
|
||||||
name: ${{ github.ref_name }}
|
name: v${{ env.VERSION }}
|
||||||
draft: true
|
draft: true
|
||||||
prerelease: ${{ startsWith(github.ref_name, 'v0') }}
|
prerelease: ${{ startsWith(env.VERSION, '0') }}
|
||||||
files: artifacts/*
|
files: artifacts/*
|
31
.github/workflows/test-and-lint.yaml
vendored
|
@ -11,30 +11,33 @@ jobs:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Set up Rust
|
# we use some nightly rustfmt features, so we need nightly toolchain
|
||||||
uses: moonrepo/setup-rust@v1
|
- uses: dtolnay/rust-toolchain@nightly
|
||||||
with:
|
with:
|
||||||
bins: cargo-tarpaulin
|
components: rustfmt
|
||||||
components: rustfmt, clippy
|
- uses: dtolnay/rust-toolchain@stable
|
||||||
|
with:
|
||||||
|
components: clippy
|
||||||
|
|
||||||
- name: Install OS dependencies
|
- name: Install OS dependencies
|
||||||
run: |
|
run: |
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
sudo apt-get install libdbus-1-dev pkg-config
|
sudo apt-get install libdbus-1-dev pkg-config
|
||||||
|
|
||||||
- name: Run tests
|
# pesde currently does not have any tests. Bring this back when (if) tests are added.
|
||||||
run: cargo test --all --all-features
|
# - name: Run tests
|
||||||
|
# run: cargo test --all --all-features
|
||||||
|
|
||||||
- name: Check formatting
|
- name: Check formatting
|
||||||
run: cargo fmt --all -- --check
|
run: cargo +nightly fmt --all -- --check
|
||||||
|
|
||||||
- name: Run clippy
|
- name: Run clippy
|
||||||
run: cargo clippy --all --all-targets --all-features -- -D warnings
|
run: cargo clippy --all --all-targets --all-features -- -D warnings
|
||||||
|
|
||||||
- name: Generate coverage report
|
# - name: Generate coverage report
|
||||||
run: cargo tarpaulin --all-features --out xml --exclude-files src/cli/* --exclude-files registry/* --exclude-files src/main.rs --skip-clean
|
# run: cargo tarpaulin --all-features --out xml --exclude-files src/cli/* --exclude-files registry/* --exclude-files src/main.rs --skip-clean
|
||||||
|
#
|
||||||
- name: Upload coverage reports to Codecov
|
# - name: Upload coverage reports to Codecov
|
||||||
uses: codecov/codecov-action@v4.0.1
|
# uses: codecov/codecov-action@v4.0.1
|
||||||
with:
|
# with:
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
# token: ${{ secrets.CODECOV_TOKEN }}
|
112
CHANGELOG.md
|
@ -5,34 +5,93 @@ All notable changes to this project will be documented in this file.
|
||||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
## [0.5.0-rc.5] - 2024-10-12
|
## [0.5.3] - 2024-12-30
|
||||||
### Added
|
### Added
|
||||||
- Inform user about not finding any bin package when using its bin invocation by @daimond113
|
- Add meta field in index files to preserve compatibility with potential future changes by @daimond113
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
- Remove verbosity from release mode logging by @daimond113
|
||||||
|
|
||||||
|
## [0.5.2] - 2024-12-19
|
||||||
### Fixed
|
### Fixed
|
||||||
- Fix `self-upgrade` overwriting its own binary by @daimond113
|
- Change dependency types for removed peer dependencies by @daimond113
|
||||||
- Allow use of Luau packages in `execute` command by @daimond113
|
- Resolve version to correct tag for `pesde_version` field by @daimond113
|
||||||
- Remove duplicated manifest file name in `publish` command by @daimond113
|
- Do not error on missing dependencies until full linking by @daimond113
|
||||||
|
|
||||||
## [0.5.0-rc.4] - 2024-10-12
|
### Changed
|
||||||
### Added
|
- Switch from `log` to `tracing` for logging by @daimond113
|
||||||
- Add `yes` argument to skip all prompts in publish command by @daimond113
|
|
||||||
- Publish all workspace members when publishing a workspace by @daimond113
|
|
||||||
|
|
||||||
|
## [0.5.1] - 2024-12-15
|
||||||
### Fixed
|
### Fixed
|
||||||
- Add feature gates to `wally-compat` specific code in init command by @daimond113
|
- Ignore build metadata when comparing CLI versions by @daimond113
|
||||||
|
|
||||||
## [0.5.0-rc.3] - 2024-10-06
|
## [0.5.0] - 2024-12-14
|
||||||
### Fixed
|
|
||||||
- Use workspace specifiers' `target` field when resolving by @daimond113
|
|
||||||
|
|
||||||
## [0.5.0-rc.2] - 2024-10-06
|
|
||||||
### Added
|
### Added
|
||||||
- Add support for multiple targets under the same package name in workspace members by @daimond113
|
- Add support for multiple targets under the same package name in workspace members by @daimond113
|
||||||
|
- Add `yes` argument to skip all prompts in publish command by @daimond113
|
||||||
|
- Publish all workspace members when publishing a workspace by @daimond113
|
||||||
|
- Inform user about not finding any bin package when using its bin invocation by @daimond113
|
||||||
|
- Support full version requirements in workspace version field by @daimond113
|
||||||
|
- Improved authentication system for registry changes by @daimond113
|
||||||
|
- New website by @lukadev-0
|
||||||
|
- Add `--index` flag to `publish` command to publish to a specific index by @daimond113
|
||||||
|
- Support fallback Wally registries by @daimond113
|
||||||
|
- Print that no updates are available in `outdated` command by @daimond113
|
||||||
|
- Support negated globs in `workspace_members` field by @daimond113
|
||||||
|
- Make `includes` use glob patterns by @daimond113
|
||||||
|
- Use symlinks for workspace dependencies to not require reinstalling by @daimond113
|
||||||
|
- Add `auth token` command to print the auth token for the index by @daimond113
|
||||||
|
- Support specifying which external registries are allowed on registries by @daimond113
|
||||||
|
- Add improved CLI styling by @daimond113
|
||||||
|
- Install pesde dependencies before Wally to support scripts packages by @daimond113
|
||||||
|
- Support packages exporting scripts by @daimond113
|
||||||
|
- Support using workspace root as a member by @daimond113
|
||||||
|
- Allow multiple, user selectable scripts packages to be selected (& custom packages inputted) in `init` command by @daimond113
|
||||||
|
- Support granular control over which repositories are allowed in various specifier types by @daimond113
|
||||||
|
- Display included scripts in `publish` command by @daimond113
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
- Fix versions with dots not being handled correctly by @daimond113
|
- Fix versions with dots not being handled correctly by @daimond113
|
||||||
|
- Use workspace specifiers' `target` field when resolving by @daimond113
|
||||||
|
- Add feature gates to `wally-compat` specific code in init command by @daimond113
|
||||||
|
- Remove duplicated manifest file name in `publish` command by @daimond113
|
||||||
|
- Allow use of Luau packages in `execute` command by @daimond113
|
||||||
|
- Fix `self-upgrade` overwriting its own binary by @daimond113
|
||||||
|
- Correct `pesde.toml` inclusion message in `publish` command by @daimond113
|
||||||
|
- Allow writes to files when `link` is false in PackageFS::write_to by @daimond113
|
||||||
|
- Handle missing revisions in AnyPackageIdentifier::from_str by @daimond113
|
||||||
|
- Make GitHub OAuth client ID config optional by @daimond113
|
||||||
|
- Use updated aliases when reusing lockfile dependencies by @daimond113
|
||||||
|
- Listen for device flow completion without requiring pressing enter by @daimond113
|
||||||
|
- Sync scripts repo in background by @daimond113
|
||||||
|
- Don't make CAS files read-only on Windows (file removal is disallowed if the file is read-only) by @daimond113
|
||||||
|
- Validate package names are lowercase by @daimond113
|
||||||
|
- Use a different algorithm for finding a CAS directory to avoid issues with mounted drives by @daimond113
|
||||||
|
- Remove default.project.json from Git pesde dependencies by @daimond113
|
||||||
|
- Correctly (de)serialize workspace specifiers by @daimond113
|
||||||
|
- Fix CAS finder algorithm issues with Windows by @daimond113
|
||||||
|
- Fix CAS finder algorithm's AlreadyExists error by @daimond113
|
||||||
|
- Use moved path when setting file to read-only by @daimond113
|
||||||
|
- Correctly link Wally server packages by @daimond113
|
||||||
|
- Fix `self-install` doing a cross-device move by @daimond113
|
||||||
|
- Add back mistakenly removed updates check caching by @daimond113
|
||||||
|
- Set download error source to inner error to propagate the error by @daimond113
|
||||||
|
- Correctly copy workspace packages by @daimond113
|
||||||
|
- Fix peer dependencies being resolved incorrectly by @daimond113
|
||||||
|
- Set PESDE_ROOT to the correct path in `pesde run` by @daimond113
|
||||||
|
- Install dependencies of packages in `x` command by @daimond113
|
||||||
|
- Fix `includes` not supporting root files by @daimond113
|
||||||
|
- Link dependencies before type extraction to support more use cases by @daimond113
|
||||||
|
- Strip `.luau` extension from linker modules' require paths to comply with Luau by @daimond113
|
||||||
|
- Correctly handle graph paths for resolving overriden packages by @daimond113
|
||||||
|
- Do not require `--` in bin package executables on Unix by @daimond113
|
||||||
|
- Do not require lib or bin exports if package exports scripts by @daimond113
|
||||||
|
- Correctly resolve URLs in `publish` command by @daimond113
|
||||||
|
- Add Roblox types in linker modules even with no config generator script by @daimond113
|
||||||
|
|
||||||
|
### Removed
|
||||||
|
- Remove special scripts repo handling to favour standard packages by @daimond113
|
||||||
|
|
||||||
## [0.5.0-rc.1] - 2024-10-06
|
|
||||||
### Changed
|
### Changed
|
||||||
- Rewrite the entire project in a more maintainable way by @daimond113
|
- Rewrite the entire project in a more maintainable way by @daimond113
|
||||||
- Support workspaces by @daimond113
|
- Support workspaces by @daimond113
|
||||||
|
@ -40,9 +99,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||||
- Support multiple targets for a single package by @daimond113
|
- Support multiple targets for a single package by @daimond113
|
||||||
- Make registry much easier to self-host by @daimond113
|
- Make registry much easier to self-host by @daimond113
|
||||||
- Start maintaining a changelog by @daimond113
|
- Start maintaining a changelog by @daimond113
|
||||||
|
- Optimize boolean expression in `publish` command by @daimond113
|
||||||
|
- Switched to fs-err for better errors with file system operations by @daimond113
|
||||||
|
- Use body bytes over multipart for publishing packages by @daimond113
|
||||||
|
- `self-upgrade` now will check for updates by itself by default by @daimond113
|
||||||
|
- Only store `pesde_version` executables in the version cache by @daimond113
|
||||||
|
- Remove lower bound limit of 3 characters for pesde package names by @daimond113
|
||||||
|
|
||||||
[0.5.0-rc.5]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.4..v0.5.0-rc.5
|
### Performance
|
||||||
[0.5.0-rc.4]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.3..v0.5.0-rc.4
|
- Clone dependency repos shallowly by @daimond113
|
||||||
[0.5.0-rc.3]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.2..v0.5.0-rc.3
|
- Switch to async Rust by @daimond113
|
||||||
[0.5.0-rc.2]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.1..v0.5.0-rc.2
|
- Asyncify dependency linking by @daimond113
|
||||||
[0.5.0-rc.1]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0-rc.1
|
- Use `exec` in Unix bin linking to reduce the number of processes by @daimond113
|
||||||
|
|
||||||
|
[0.5.3]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
|
||||||
|
[0.5.2]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
|
||||||
|
[0.5.1]: https://github.com/daimond113/pesde/compare/v0.5.0%2Bregistry.0.1.0..v0.5.1%2Bregistry.0.1.0
|
||||||
|
[0.5.0]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0%2Bregistry.0.1.0
|
||||||
|
|
2032
Cargo.lock
generated
103
Cargo.toml
|
@ -1,35 +1,38 @@
|
||||||
[package]
|
[package]
|
||||||
name = "pesde"
|
name = "pesde"
|
||||||
version = "0.5.0-rc.5"
|
version = "0.5.3"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
authors = ["daimond113 <contact@daimond113.com>"]
|
authors = ["daimond113 <contact@daimond113.com>"]
|
||||||
description = "A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune"
|
description = "A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune"
|
||||||
homepage = "https://pesde.daimond113.com"
|
homepage = "https://pesde.daimond113.com"
|
||||||
repository = "https://github.com/daimond113/pesde"
|
repository = "https://github.com/pesde-pkg/pesde"
|
||||||
include = ["src/**/*", "Cargo.toml", "Cargo.lock", "README.md", "LICENSE", "CHANGELOG.md"]
|
include = ["src/**/*", "Cargo.toml", "Cargo.lock", "README.md", "LICENSE", "CHANGELOG.md"]
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
bin = [
|
bin = [
|
||||||
"clap",
|
"dep:clap",
|
||||||
"dirs",
|
"dep:dirs",
|
||||||
"pretty_env_logger",
|
"dep:tracing-subscriber",
|
||||||
"reqwest/json",
|
"reqwest/json",
|
||||||
"reqwest/multipart",
|
"dep:indicatif",
|
||||||
"indicatif",
|
"dep:tracing-indicatif",
|
||||||
"indicatif-log-bridge",
|
"dep:inquire",
|
||||||
"inquire",
|
"dep:toml_edit",
|
||||||
"toml_edit",
|
"dep:colored",
|
||||||
"colored",
|
"dep:anyhow",
|
||||||
"anyhow",
|
"dep:keyring",
|
||||||
"keyring",
|
"dep:open",
|
||||||
"open",
|
|
||||||
"gix/worktree-mutation",
|
"gix/worktree-mutation",
|
||||||
"serde_json",
|
"dep:serde_json",
|
||||||
"winreg"
|
"dep:winreg",
|
||||||
|
"fs-err/expose_original_error",
|
||||||
|
"tokio/rt",
|
||||||
|
"tokio/rt-multi-thread",
|
||||||
|
"tokio/macros",
|
||||||
]
|
]
|
||||||
wally-compat = ["zip", "serde_json"]
|
wally-compat = ["dep:async_zip", "dep:serde_json"]
|
||||||
patches = ["git2"]
|
patches = ["dep:git2"]
|
||||||
version-management = ["bin"]
|
version-management = ["bin"]
|
||||||
|
|
||||||
[[bin]]
|
[[bin]]
|
||||||
|
@ -41,42 +44,46 @@ required-features = ["bin"]
|
||||||
uninlined_format_args = "warn"
|
uninlined_format_args = "warn"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
serde = { version = "1.0.210", features = ["derive"] }
|
serde = { version = "1.0.216", features = ["derive"] }
|
||||||
toml = "0.8.19"
|
toml = "0.8.19"
|
||||||
serde_with = "3.9.0"
|
serde_with = "3.11.0"
|
||||||
gix = { version = "0.66.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials"] }
|
gix = { version = "0.68.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
|
||||||
semver = { version = "1.0.23", features = ["serde"] }
|
semver = { version = "1.0.24", features = ["serde"] }
|
||||||
reqwest = { version = "0.12.7", default-features = false, features = ["rustls-tls", "blocking"] }
|
reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls"] }
|
||||||
tar = "0.4.42"
|
tokio-tar = "0.3.1"
|
||||||
flate2 = "1.0.34"
|
async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
|
||||||
pathdiff = "0.2.1"
|
pathdiff = "0.2.3"
|
||||||
relative-path = { version = "1.9.3", features = ["serde"] }
|
relative-path = { version = "1.9.3", features = ["serde"] }
|
||||||
log = "0.4.22"
|
tracing = { version = "0.1.41", features = ["attributes"] }
|
||||||
thiserror = "1.0.64"
|
thiserror = "2.0.7"
|
||||||
threadpool = "1.8.1"
|
tokio = { version = "1.42.0", features = ["process"] }
|
||||||
full_moon = { version = "1.0.0-rc.5", features = ["luau"] }
|
tokio-util = "0.7.13"
|
||||||
url = { version = "2.5.2", features = ["serde"] }
|
async-stream = "0.3.6"
|
||||||
chrono = { version = "0.4.38", features = ["serde"] }
|
futures = "0.3.31"
|
||||||
|
full_moon = { version = "1.1.2", features = ["luau"] }
|
||||||
|
url = { version = "2.5.4", features = ["serde"] }
|
||||||
|
chrono = { version = "0.4.39", features = ["serde"] }
|
||||||
sha2 = "0.10.8"
|
sha2 = "0.10.8"
|
||||||
tempfile = "3.13.0"
|
tempfile = "3.14.0"
|
||||||
glob = "0.3.1"
|
wax = { version = "0.6.0", default-features = false }
|
||||||
|
fs-err = { version = "3.0.0", features = ["tokio"] }
|
||||||
|
|
||||||
# TODO: remove this when gitoxide adds support for: committing, pushing, adding
|
# TODO: remove this when gitoxide adds support for: committing, pushing, adding
|
||||||
git2 = { version = "0.19.0", optional = true }
|
git2 = { version = "0.19.0", optional = true }
|
||||||
|
|
||||||
zip = { version = "2.2.0", optional = true }
|
async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"], optional = true }
|
||||||
serde_json = { version = "1.0.128", optional = true }
|
serde_json = { version = "1.0.133", optional = true }
|
||||||
|
|
||||||
anyhow = { version = "1.0.89", optional = true }
|
anyhow = { version = "1.0.94", optional = true }
|
||||||
open = { version = "5.3.0", optional = true }
|
open = { version = "5.3.1", optional = true }
|
||||||
keyring = { version = "3.3.0", features = ["crypto-rust", "windows-native", "apple-native", "sync-secret-service"], optional = true }
|
keyring = { version = "3.6.1", features = ["crypto-rust", "windows-native", "apple-native", "async-secret-service", "async-io"], optional = true }
|
||||||
colored = { version = "2.1.0", optional = true }
|
colored = { version = "2.1.0", optional = true }
|
||||||
toml_edit = { version = "0.22.22", optional = true }
|
toml_edit = { version = "0.22.22", optional = true }
|
||||||
clap = { version = "4.5.18", features = ["derive"], optional = true }
|
clap = { version = "4.5.23", features = ["derive"], optional = true }
|
||||||
dirs = { version = "5.0.1", optional = true }
|
dirs = { version = "5.0.1", optional = true }
|
||||||
pretty_env_logger = { version = "0.5.0", optional = true }
|
tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true }
|
||||||
indicatif = { version = "0.17.8", optional = true }
|
indicatif = { version = "0.17.9", optional = true }
|
||||||
indicatif-log-bridge = { version = "0.2.3", optional = true }
|
tracing-indicatif = { version = "0.3.8", optional = true }
|
||||||
inquire = { version = "0.7.5", optional = true }
|
inquire = { version = "0.7.5", optional = true }
|
||||||
|
|
||||||
[target.'cfg(target_os = "windows")'.dependencies]
|
[target.'cfg(target_os = "windows")'.dependencies]
|
||||||
|
@ -88,3 +95,13 @@ members = ["registry"]
|
||||||
|
|
||||||
[profile.dev.package.full_moon]
|
[profile.dev.package.full_moon]
|
||||||
opt-level = 3
|
opt-level = 3
|
||||||
|
|
||||||
|
[profile.release]
|
||||||
|
opt-level = "s"
|
||||||
|
lto = true
|
||||||
|
incremental = true
|
||||||
|
codegen-units = 1
|
||||||
|
|
||||||
|
[profile.release.package.pesde-registry]
|
||||||
|
# add debug symbols for Sentry stack traces
|
||||||
|
debug = "full"
|
|
@ -1,10 +1,8 @@
|
||||||
FROM rust:1.81-bookworm AS builder
|
FROM rust:1.82-bookworm AS builder
|
||||||
|
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
WORKDIR /registry
|
RUN cargo build --release -p pesde-registry
|
||||||
|
|
||||||
RUN cargo build --release
|
|
||||||
|
|
||||||
FROM debian:bookworm-slim
|
FROM debian:bookworm-slim
|
||||||
|
|
||||||
|
|
36
README.md
|
@ -1,26 +1,28 @@
|
||||||
<br>
|
<br>
|
||||||
|
|
||||||
<div align="center">
|
<div align="center">
|
||||||
<img src="https://raw.githubusercontent.com/daimond113/pesde/0.5/assets/logotype.svg" alt="pesde logo" width="200" />
|
<img src="https://raw.githubusercontent.com/pesde-pkg/pesde/0.5/assets/logotype.svg" alt="pesde logo" width="200" />
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<br>
|
<br>
|
||||||
|
|
||||||
pesde is a package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune.
|
pesde is a package manager for the Luau programming language, supporting
|
||||||
pesde has its own registry, however it can also use Wally, and Git repositories as package sources.
|
multiple runtimes including Roblox and Lune. pesde has its own registry, however
|
||||||
It has been designed with multiple targets in mind, namely Roblox, Lune, and Luau.
|
it can also use Wally, and Git repositories as package sources. It has been
|
||||||
|
designed with multiple targets in mind, namely Roblox, Lune, and Luau.
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
pesde can be installed from GitHub Releases. You can find the latest
|
pesde can be installed from GitHub Releases. You can find the latest release
|
||||||
release [here](https://github.com/daimond113/pesde/releases). Once you have downloaded the binary,
|
[here](https://github.com/pesde-pkg/pesde/releases). Once you have downloaded
|
||||||
run the following command to install it:
|
the binary, run the following command to install it:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
pesde self-install
|
pesde self-install
|
||||||
```
|
```
|
||||||
|
|
||||||
Note that pesde manages its own versions, so you can update it by running the following command:
|
Note that pesde manages its own versions, so you can update it by running the
|
||||||
|
following command:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
pesde self-upgrade
|
pesde self-upgrade
|
||||||
|
@ -28,19 +30,23 @@ pesde self-upgrade
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
For more information about its usage, you can check the [documentation](https://docs.pesde.daimond113.com).
|
For more information about its usage, you can check the
|
||||||
|
[documentation](https://docs.pesde.daimond113.com).
|
||||||
*Currently waiting on [this PR](https://github.com/daimond113/pesde/pull/3) to be merged.*
|
|
||||||
|
|
||||||
## Registry
|
## Registry
|
||||||
|
|
||||||
The main pesde registry is hosted on [fly.io](https://fly.io). You can find it at https://registry.pesde.daimond113.com.
|
The main pesde registry is hosted on [fly.io](https://fly.io). You can find it
|
||||||
|
at https://registry.pesde.daimond113.com.
|
||||||
|
|
||||||
### Self-hosting
|
### Self-hosting
|
||||||
|
|
||||||
The registry tries to require no modifications to be self-hosted. Please refer to the [example .env file](https://github.com/daimond113/pesde/blob/0.5/registry/.env.example) for more information.
|
The registry tries to require no modifications to be self-hosted. Please refer
|
||||||
|
to the
|
||||||
|
[documentation](http://docs.pesde.daimond113.com/guides/self-hosting-registries)
|
||||||
|
for more information.
|
||||||
|
|
||||||
## Previous art
|
## Previous art
|
||||||
|
|
||||||
pesde is heavily inspired by [npm](https://www.npmjs.com/), [pnpm](https://pnpm.io/), [Wally](https://wally.run),
|
pesde is heavily inspired by [npm](https://www.npmjs.com/),
|
||||||
and [Cargo](https://doc.rust-lang.org/cargo/).
|
[pnpm](https://pnpm.io/), [Wally](https://wally.run), and
|
||||||
|
[Cargo](https://doc.rust-lang.org/cargo/).
|
||||||
|
|
25
SECURITY.md
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
# Security Policy
|
||||||
|
|
||||||
|
## Supported Versions
|
||||||
|
|
||||||
|
As pesde is currently in version 0.x, we can only guarantee security for:
|
||||||
|
- **The latest minor** (currently 0.5).
|
||||||
|
- **The latest release candidate for the next version**, if available.
|
||||||
|
|
||||||
|
When a new minor version is released, the previous version will immediately lose security support.
|
||||||
|
> **Note:** This policy will change with the release of version 1.0, which will include an extended support period for versions >=1.0.
|
||||||
|
|
||||||
|
| Version | Supported |
|
||||||
|
| ------- | ------------------ |
|
||||||
|
| 0.5.x | :white_check_mark: |
|
||||||
|
| < 0.5 | :x: |
|
||||||
|
|
||||||
|
## Reporting a Vulnerability
|
||||||
|
|
||||||
|
We encourage all security concerns to be reported at [pesde@daimond113.com](mailto:pesde@daimond113.com), along the following format:
|
||||||
|
- **Subject**: The subject must be prefixed with `[SECURITY]` to ensure it is prioritized as a security concern.
|
||||||
|
- **Content**:
|
||||||
|
- **Affected Versions**: Clearly specify which are affected by the issue.
|
||||||
|
- **Issue Details**: Provide a detailed description of the issue, including reproduction steps and/or a simple example, if applicable.
|
||||||
|
|
||||||
|
We will try to respond as soon as possible.
|
3
assets/logomark.svg
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
<svg viewBox="0 0 100 100" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||||
|
<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6025 0L92.9038 25V75L49.6025 100L6.30127 75V25L49.6025 0ZM14.3013 29.6188L49.6025 9.2376L84.9038 29.6188V70.3812L49.6025 90.7624L33.6148 81.5319V67.3848C34.5167 68.5071 35.6388 69.4215 36.981 70.1279C38.9701 71.148 41.0357 71.658 43.1779 71.658C46.442 71.658 49.1452 70.8929 51.2873 69.3629C53.4805 67.7818 55.1126 65.7672 56.1836 63.319C57.0915 61.3382 57.632 59.274 57.8054 57.1263C59.8723 57.7457 62.2157 58.0554 64.8356 58.0554C67.6918 58.0554 70.3695 57.6473 72.8686 56.8313C75.3678 55.9642 77.4079 54.8167 78.989 53.3886L75.7758 47.8038C74.5517 48.9258 72.9961 49.8439 71.109 50.5579C69.2219 51.221 67.2073 51.5525 65.0652 51.5525C61.3929 51.5525 58.6643 50.6854 56.8792 48.9513C56.7195 48.7962 56.567 48.6365 56.4217 48.472C55.6102 47.5539 55.0211 46.4896 54.6546 45.2791L54.6443 45.2452L54.669 45.2791H79.2185V41.9894C79.2185 39.0313 78.5555 36.3536 77.2294 33.9565C75.9543 31.5593 74.0927 29.6467 71.6445 28.2186C69.2474 26.7395 66.3657 26 62.9995 26C59.6843 26 56.8027 26.7395 54.3545 28.2186C51.9064 29.6467 50.0193 31.5593 48.6932 33.9565C47.6743 35.7983 47.0469 37.8057 46.8108 39.9788C45.6888 39.728 44.4778 39.6026 43.1779 39.6026C41.0357 39.6026 38.9701 40.1127 36.981 41.1327C35.3162 41.9651 33.9902 43.1549 33.0028 44.7023V40.3677H20.6855V46.2585H25.8113V77.0266L14.3013 70.3812V29.6188ZM55.1961 36.0986C54.6528 37.1015 54.3321 38.1216 54.234 39.1588H71.7976C71.7976 38.0367 71.4405 36.9401 70.7265 35.8691C70.0634 34.747 69.0689 33.8035 67.7428 33.0384C66.4677 32.2734 64.8867 31.8908 62.9995 31.8908C61.1124 31.8908 59.5058 32.2989 58.1798 33.1149C56.9047 33.88 55.9101 34.8745 55.1961 36.0986ZM49.6451 51.5692C49.3076 50.6641 48.8381 49.871 48.2367 49.1898C48.0885 49.0219 47.9323 48.8609 47.7681 48.7067C46.085 47.0746 44.0449 46.2585 41.6478 46.2585C40.1177 46.2585 38.6131 46.5645 37.134 47.1766C35.8594 47.6773 34.6863 48.5438 33.6148 49.7759V61.47C34.6863 62.6664 35.8594 63.5378 37.134 64.084C38.6131 64.6961 40.1177 
65.0021 41.6478 65.0021C44.0449 65.0021 46.085 64.1861 47.7681 62.554C49.4512 60.9219 50.2928 58.6012 50.2928 55.5921C50.2928 54.0679 50.0769 52.727 49.6451 51.5692Z" fill="#F19D1E"></path>
|
||||||
|
</svg>
|
After Width: | Height: | Size: 2.2 KiB |
22
docs/.gitignore
vendored
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
# build output
|
||||||
|
dist/
|
||||||
|
# generated types
|
||||||
|
.astro/
|
||||||
|
.vercel/
|
||||||
|
|
||||||
|
# dependencies
|
||||||
|
node_modules/
|
||||||
|
|
||||||
|
# logs
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
pnpm-debug.log*
|
||||||
|
|
||||||
|
|
||||||
|
# environment variables
|
||||||
|
.env
|
||||||
|
.env.production
|
||||||
|
|
||||||
|
# macOS-specific files
|
||||||
|
.DS_Store
|
14
docs/.prettierrc
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
{
|
||||||
|
"useTabs": true,
|
||||||
|
"printWidth": 100,
|
||||||
|
"semi": false,
|
||||||
|
"plugins": ["prettier-plugin-astro", "prettier-plugin-tailwindcss"],
|
||||||
|
"overrides": [
|
||||||
|
{
|
||||||
|
"files": "*.astro",
|
||||||
|
"options": {
|
||||||
|
"parser": "astro"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
107
docs/astro.config.mjs
Normal file
|
@ -0,0 +1,107 @@
|
||||||
|
import starlight from "@astrojs/starlight"
|
||||||
|
import tailwind from "@astrojs/tailwind"
|
||||||
|
import { defineConfig } from "astro/config"
|
||||||
|
|
||||||
|
// https://astro.build/config
|
||||||
|
export default defineConfig({
|
||||||
|
site: "https://docs.pesde.daimond113.com",
|
||||||
|
integrations: [
|
||||||
|
starlight({
|
||||||
|
title: "pesde docs",
|
||||||
|
social: {
|
||||||
|
github: "https://github.com/pesde-pkg/pesde",
|
||||||
|
},
|
||||||
|
sidebar: [
|
||||||
|
{
|
||||||
|
label: "Intro",
|
||||||
|
items: [{ slug: "" }, { slug: "installation" }, { slug: "quickstart" }],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: "Guides",
|
||||||
|
autogenerate: { directory: "guides" },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: "Reference",
|
||||||
|
autogenerate: { directory: "reference" },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: "Registry",
|
||||||
|
autogenerate: { directory: "registry" },
|
||||||
|
},
|
||||||
|
],
|
||||||
|
components: {
|
||||||
|
SiteTitle: "./src/components/SiteTitle.astro",
|
||||||
|
},
|
||||||
|
customCss: ["./src/tailwind.css", "@fontsource-variable/nunito-sans"],
|
||||||
|
favicon: "/favicon.ico",
|
||||||
|
head: [
|
||||||
|
{
|
||||||
|
tag: "meta",
|
||||||
|
attrs: {
|
||||||
|
name: "theme-color",
|
||||||
|
content: "#F19D1E",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
tag: "meta",
|
||||||
|
attrs: {
|
||||||
|
property: "og:image",
|
||||||
|
content: "/favicon-48x48.png",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
tag: "meta",
|
||||||
|
attrs: {
|
||||||
|
name: "twitter:card",
|
||||||
|
content: "summary",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
tag: "link",
|
||||||
|
attrs: {
|
||||||
|
rel: "icon",
|
||||||
|
type: "image/png",
|
||||||
|
href: "/favicon-48x48.png",
|
||||||
|
sizes: "48x48",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
tag: "link",
|
||||||
|
attrs: {
|
||||||
|
rel: "icon",
|
||||||
|
type: "image/svg+xml",
|
||||||
|
href: "/favicon.svg",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
tag: "link",
|
||||||
|
attrs: {
|
||||||
|
rel: "apple-touch-icon",
|
||||||
|
sizes: "180x180",
|
||||||
|
href: "/apple-touch-icon.png",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
tag: "meta",
|
||||||
|
attrs: {
|
||||||
|
name: "apple-mobile-web-app-title",
|
||||||
|
content: "pesde docs",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
tag: "link",
|
||||||
|
attrs: {
|
||||||
|
rel: "manifest",
|
||||||
|
href: "/site.webmanifest",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
tailwind({
|
||||||
|
applyBaseStyles: false,
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
vite: {
|
||||||
|
envDir: "..",
|
||||||
|
},
|
||||||
|
})
|
BIN
docs/bun.lockb
Executable file
29
docs/package.json
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
{
|
||||||
|
"name": "docs",
|
||||||
|
"type": "module",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"scripts": {
|
||||||
|
"dev": "astro dev",
|
||||||
|
"start": "astro dev",
|
||||||
|
"build": "astro check && astro build",
|
||||||
|
"preview": "astro preview",
|
||||||
|
"astro": "astro"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@astrojs/check": "^0.9.3",
|
||||||
|
"@astrojs/starlight": "^0.28.2",
|
||||||
|
"@astrojs/starlight-tailwind": "^2.0.3",
|
||||||
|
"@astrojs/tailwind": "^5.1.1",
|
||||||
|
"@fontsource-variable/nunito-sans": "^5.1.0",
|
||||||
|
"@shikijs/rehype": "^1.21.0",
|
||||||
|
"astro": "^4.15.9",
|
||||||
|
"sharp": "^0.33.5",
|
||||||
|
"shiki": "^1.21.0",
|
||||||
|
"tailwindcss": "^3.4.13",
|
||||||
|
"typescript": "^5.6.2"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"prettier-plugin-astro": "^0.14.1",
|
||||||
|
"prettier-plugin-tailwindcss": "^0.6.8"
|
||||||
|
}
|
||||||
|
}
|
BIN
docs/public/apple-touch-icon.png
Normal file
After Width: | Height: | Size: 3.9 KiB |
BIN
docs/public/favicon-48x48.png
Normal file
After Width: | Height: | Size: 1.3 KiB |
BIN
docs/public/favicon.ico
Normal file
After Width: | Height: | Size: 15 KiB |
3
docs/public/favicon.svg
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
<svg viewBox="0 0 100 100" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||||
|
<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6025 0L92.9038 25V75L49.6025 100L6.30127 75V25L49.6025 0ZM14.3013 29.6188L49.6025 9.2376L84.9038 29.6188V70.3812L49.6025 90.7624L33.6148 81.5319V67.3848C34.5167 68.5071 35.6388 69.4215 36.981 70.1279C38.9701 71.148 41.0357 71.658 43.1779 71.658C46.442 71.658 49.1452 70.8929 51.2873 69.3629C53.4805 67.7818 55.1126 65.7672 56.1836 63.319C57.0915 61.3382 57.632 59.274 57.8054 57.1263C59.8723 57.7457 62.2157 58.0554 64.8356 58.0554C67.6918 58.0554 70.3695 57.6473 72.8686 56.8313C75.3678 55.9642 77.4079 54.8167 78.989 53.3886L75.7758 47.8038C74.5517 48.9258 72.9961 49.8439 71.109 50.5579C69.2219 51.221 67.2073 51.5525 65.0652 51.5525C61.3929 51.5525 58.6643 50.6854 56.8792 48.9513C56.7195 48.7962 56.567 48.6365 56.4217 48.472C55.6102 47.5539 55.0211 46.4896 54.6546 45.2791L54.6443 45.2452L54.669 45.2791H79.2185V41.9894C79.2185 39.0313 78.5555 36.3536 77.2294 33.9565C75.9543 31.5593 74.0927 29.6467 71.6445 28.2186C69.2474 26.7395 66.3657 26 62.9995 26C59.6843 26 56.8027 26.7395 54.3545 28.2186C51.9064 29.6467 50.0193 31.5593 48.6932 33.9565C47.6743 35.7983 47.0469 37.8057 46.8108 39.9788C45.6888 39.728 44.4778 39.6026 43.1779 39.6026C41.0357 39.6026 38.9701 40.1127 36.981 41.1327C35.3162 41.9651 33.9902 43.1549 33.0028 44.7023V40.3677H20.6855V46.2585H25.8113V77.0266L14.3013 70.3812V29.6188ZM55.1961 36.0986C54.6528 37.1015 54.3321 38.1216 54.234 39.1588H71.7976C71.7976 38.0367 71.4405 36.9401 70.7265 35.8691C70.0634 34.747 69.0689 33.8035 67.7428 33.0384C66.4677 32.2734 64.8867 31.8908 62.9995 31.8908C61.1124 31.8908 59.5058 32.2989 58.1798 33.1149C56.9047 33.88 55.9101 34.8745 55.1961 36.0986ZM49.6451 51.5692C49.3076 50.6641 48.8381 49.871 48.2367 49.1898C48.0885 49.0219 47.9323 48.8609 47.7681 48.7067C46.085 47.0746 44.0449 46.2585 41.6478 46.2585C40.1177 46.2585 38.6131 46.5645 37.134 47.1766C35.8594 47.6773 34.6863 48.5438 33.6148 49.7759V61.47C34.6863 62.6664 35.8594 63.5378 37.134 64.084C38.6131 64.6961 40.1177 
65.0021 41.6478 65.0021C44.0449 65.0021 46.085 64.1861 47.7681 62.554C49.4512 60.9219 50.2928 58.6012 50.2928 55.5921C50.2928 54.0679 50.0769 52.727 49.6451 51.5692Z" fill="#F19D1E"></path>
|
||||||
|
</svg>
|
After Width: | Height: | Size: 2.2 KiB |
21
docs/public/site.webmanifest
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
{
|
||||||
|
"name": "pesde",
|
||||||
|
"short_name": "pesde",
|
||||||
|
"icons": [
|
||||||
|
{
|
||||||
|
"src": "/web-app-manifest-192x192.png",
|
||||||
|
"sizes": "192x192",
|
||||||
|
"type": "image/png",
|
||||||
|
"purpose": "maskable"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": "/web-app-manifest-512x512.png",
|
||||||
|
"sizes": "512x512",
|
||||||
|
"type": "image/png",
|
||||||
|
"purpose": "maskable"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"theme_color": "#f19d1e",
|
||||||
|
"background_color": "#0a0704",
|
||||||
|
"display": "standalone"
|
||||||
|
}
|
BIN
docs/public/web-app-manifest-192x192.png
Normal file
After Width: | Height: | Size: 4.1 KiB |
BIN
docs/public/web-app-manifest-512x512.png
Normal file
After Width: | Height: | Size: 15 KiB |
36
docs/src/components/SiteTitle.astro
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
<div class="flex items-center">
|
||||||
|
<a
|
||||||
|
href="https://pesde.daimond113.com/"
|
||||||
|
class="flex text-[var(--sl-color-text-accent)] hover:opacity-80"
|
||||||
|
>
|
||||||
|
<svg
|
||||||
|
viewBox="0 0 56 28"
|
||||||
|
class="h-7"
|
||||||
|
fill="none"
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
>
|
||||||
|
<title>pesde</title>
|
||||||
|
<path
|
||||||
|
d="M0 28V26.3156H2.25652V12.2361H0.0635639V10.5517H4.44947L4.48125 11.9819L3.78205 12.3315C4.41769 11.6746 5.16986 11.1661 6.03857 10.8059C6.92846 10.4245 7.82895 10.2338 8.74003 10.2338C9.863 10.2338 10.88 10.4775 11.7911 10.9648C12.7234 11.4522 13.4544 12.1726 13.9841 13.126C14.5349 14.0795 14.8104 15.2448 14.8104 16.6221C14.8104 18.0416 14.5138 19.26 13.9205 20.277C13.3272 21.2728 12.5327 22.0356 11.5368 22.5653C10.5622 23.095 9.5028 23.3598 8.35865 23.3598C7.72301 23.3598 7.11916 23.2751 6.54708 23.1056C5.99619 22.9361 5.50887 22.7242 5.08511 22.4699C4.66135 22.1945 4.34353 21.8873 4.13165 21.5483L4.60838 21.4529L4.5766 26.3156H7.02381V28H0ZM7.94549 21.6118C9.19558 21.6118 10.2444 21.2092 11.0919 20.4041C11.9394 19.5778 12.3632 18.3807 12.3632 16.8127C12.3632 15.2872 11.9606 14.1113 11.1555 13.2849C10.3503 12.4586 9.3333 12.0454 8.1044 12.0454C7.72301 12.0454 7.26747 12.1196 6.73777 12.2679C6.20807 12.395 5.67837 12.6069 5.14867 12.9035C4.61898 13.2002 4.17403 13.5922 3.81383 14.0795L4.5766 12.7446L4.60838 20.7219L3.8774 19.7367C4.42828 20.3299 5.06392 20.7961 5.78431 21.1351C6.5047 21.4529 7.2251 21.6118 7.94549 21.6118Z"
|
||||||
|
fill="currentColor"></path>
|
||||||
|
<path
|
||||||
|
d="M18.37 17.7448C17.0563 17.7448 15.8592 17.4694 14.7786 16.9185C13.7192 16.3676 12.8717 15.5942 12.236 14.5984C11.6216 13.6026 11.3144 12.4478 11.3144 11.1341C11.3144 9.77811 11.611 8.61277 12.2043 7.63813C12.7975 6.64229 13.5921 5.87953 14.5879 5.34983C15.5837 4.79894 16.6961 4.5235 17.925 4.5235C19.1963 4.5235 20.2875 4.78835 21.1986 5.31805C22.1308 5.84775 22.8406 6.59992 23.3279 7.57456C23.8153 8.54921 24.0589 9.69336 24.0589 11.007V11.6109H13.3802L13.412 10.1489H21.7388C21.7388 9.32257 21.5693 8.62337 21.2303 8.05129C20.9125 7.45803 20.4676 7.01308 19.8955 6.71645C19.3234 6.39863 18.6666 6.23972 17.925 6.23972C17.1411 6.23972 16.4313 6.43042 15.7956 6.8118C15.16 7.17199 14.6515 7.70169 14.2701 8.4009C13.9099 9.07891 13.7298 9.90524 13.7298 10.8799C13.7298 11.9181 13.9205 12.8186 14.3019 13.5814C14.6833 14.3229 15.213 14.9056 15.891 15.3294C16.5902 15.732 17.3847 15.9332 18.2746 15.9332C19.2281 15.9332 20.1074 15.7425 20.9125 15.3612C21.7177 14.9798 22.4169 14.503 23.0101 13.931L23.8365 15.3612C23.2644 16.018 22.5228 16.5795 21.6117 17.0456C20.7006 17.5117 19.6201 17.7448 18.37 17.7448Z"
|
||||||
|
fill="currentColor"></path>
|
||||||
|
<path
|
||||||
|
d="M28.7199 22.6288C28.0631 22.6288 27.4275 22.5441 26.813 22.3746C26.2198 22.2051 25.7007 21.972 25.2557 21.6754C24.8108 21.3788 24.4718 21.0292 24.2387 20.6266L24.7154 20.9126V22.311H22.8721V17.9887H24.5247L24.7154 20.0545L24.2705 18.5608C24.5035 19.3447 25.0227 19.9486 25.8278 20.3723C26.6541 20.7961 27.5122 21.008 28.4021 21.008C29.2073 21.008 29.8747 20.8491 30.4044 20.5312C30.9553 20.2134 31.2307 19.7685 31.2307 19.1964C31.2307 18.5184 30.9977 18.0522 30.5315 17.798C30.0866 17.5225 29.3662 17.2789 28.3703 17.067L26.6223 16.6856C25.457 16.389 24.5353 15.997 23.8573 15.5097C23.1793 15.0012 22.8403 14.1642 22.8403 12.9989C22.8403 11.8759 23.2746 11.0178 24.1434 10.4245C25.0332 9.81009 26.135 9.50286 27.4487 9.50286C27.9572 9.50286 28.4869 9.57702 29.0378 9.72534C29.6098 9.85246 30.129 10.0538 30.5951 10.3292C31.0612 10.6046 31.3896 10.9436 31.5803 11.3462L31.1036 11.1873V9.75712H32.9787V14.0477H31.3261L30.9129 11.0284L31.4532 13.126C31.3684 12.7446 31.1248 12.4162 30.7222 12.1408C30.3408 11.8441 29.8853 11.6111 29.3556 11.4416C28.8471 11.2721 28.3386 11.1873 27.8301 11.1873C27.152 11.1873 26.5376 11.325 25.9867 11.6005C25.457 11.8759 25.1922 12.3209 25.1922 12.9353C25.1922 13.4015 25.3723 13.7617 25.7325 14.0159C26.1138 14.2702 26.7283 14.5033 27.5758 14.7151L29.1967 15.0647C30.0018 15.2554 30.7222 15.4885 31.3579 15.7639C32.0147 16.0182 32.5338 16.3996 32.9152 16.9081C33.2966 17.3954 33.4872 18.0946 33.4872 19.0057C33.4872 19.832 33.2542 20.5206 32.788 21.0715C32.3431 21.6012 31.7498 21.9932 31.0083 22.2475C30.2879 22.5017 29.5251 22.6288 28.7199 22.6288Z"
|
||||||
|
fill="currentColor"></path>
|
||||||
|
<path
|
||||||
|
d="M37.1104 18.5607C35.9662 18.5607 34.9068 18.3064 33.9322 17.7979C32.9787 17.2682 32.2054 16.5054 31.6121 15.5096C31.0188 14.5138 30.7222 13.3272 30.7222 11.95C30.7222 10.5304 30.9977 9.34389 31.5485 8.39043C32.1206 7.41579 32.8728 6.67421 33.8051 6.1657C34.7373 5.65719 35.7544 5.40293 36.8561 5.40293C37.746 5.40293 38.6253 5.57243 39.494 5.91144C40.3839 6.22926 41.1679 6.6848 41.8459 7.27807L41.0831 7.5641V1.65266H38.8584V0H43.435V16.5266H45.4055V18.2111H41.0831V16.5584L41.8141 16.6855C41.1997 17.2788 40.5005 17.7449 39.7165 18.0839C38.9537 18.4018 38.085 18.5607 37.1104 18.5607ZM37.6189 16.7809C38.4452 16.7809 39.208 16.622 39.9072 16.3042C40.6276 15.9863 41.2844 15.5626 41.8777 15.0329L41.0831 16.1135V7.85014L41.8777 8.99429C41.2208 8.42221 40.4793 7.97727 39.6529 7.65945C38.8478 7.34163 38.0744 7.18272 37.3329 7.18272C36.5277 7.18272 35.8073 7.37341 35.1717 7.75479C34.5572 8.13618 34.0699 8.69766 33.7097 9.43924C33.3495 10.1596 33.1694 11.0601 33.1694 12.1407C33.1694 13.1366 33.3707 13.9841 33.7733 14.6833C34.1759 15.3613 34.7056 15.8804 35.3624 16.2406C36.0404 16.6008 36.7926 16.7809 37.6189 16.7809Z"
|
||||||
|
fill="currentColor"></path>
|
||||||
|
<path
|
||||||
|
d="M50.3188 24.2004C49.0051 24.2004 47.808 23.925 46.7274 23.3741C45.668 22.8232 44.8205 22.0498 44.1848 21.054C43.5704 20.0582 43.2632 18.9034 43.2632 17.5898C43.2632 16.2337 43.5598 15.0684 44.1531 14.0937C44.7463 13.0979 45.5409 12.3351 46.5367 11.8054C47.5325 11.2545 48.6449 10.9791 49.8738 10.9791C51.1451 10.9791 52.2363 11.2439 53.1474 11.7736C54.0796 12.3033 54.7894 13.0555 55.2767 14.0302C55.7641 15.0048 56.0077 16.149 56.0077 17.4626V18.0665H45.329L45.3608 16.6045H53.6876C53.6876 15.7782 53.5181 15.079 53.1791 14.5069C52.8613 13.9136 52.4164 13.4687 51.8443 13.172C51.2722 12.8542 50.6154 12.6953 49.8738 12.6953C49.0899 12.6953 48.3801 12.886 47.7444 13.2674C47.1088 13.6276 46.6003 14.1573 46.2189 14.8565C45.8587 15.5345 45.6786 16.3609 45.6786 17.3355C45.6786 18.3737 45.8693 19.2742 46.2507 20.037C46.6321 20.7786 47.1617 21.3612 47.8398 21.785C48.539 22.1876 49.3335 22.3888 50.2234 22.3888C51.1769 22.3888 52.0562 22.1982 52.8613 21.8168C53.6665 21.4354 54.3657 20.9587 54.9589 20.3866L55.7853 21.8168C55.2132 22.4736 54.4716 23.0351 53.5605 23.5012C52.6494 23.9673 51.5688 24.2004 50.3188 24.2004Z"
|
||||||
|
fill="currentColor"></path>
|
||||||
|
</svg>
|
||||||
|
</a>
|
||||||
|
<span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span
|
||||||
|
>
|
||||||
|
<a
|
||||||
|
class="font-medium text-[var(--sl-color-gray-2)] no-underline hover:opacity-80 md:text-lg"
|
||||||
|
href="/">docs</a
|
||||||
|
>
|
||||||
|
</div>
|
6
docs/src/content/config.ts
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
import { defineCollection } from "astro:content"
|
||||||
|
import { docsSchema } from "@astrojs/starlight/schema"
|
||||||
|
|
||||||
|
export const collections = {
|
||||||
|
docs: defineCollection({ schema: docsSchema() }),
|
||||||
|
}
|
74
docs/src/content/docs/guides/binary-packages.mdx
Normal file
|
@ -0,0 +1,74 @@
|
||||||
|
---
|
||||||
|
title: Using Binary Packages
|
||||||
|
description: Learn how to use binary packages.
|
||||||
|
---
|
||||||
|
|
||||||
|
A **binary package** is a package that contains a binary export.
|
||||||
|
|
||||||
|
Binary packages can be run like a normal program. There are several ways to use
|
||||||
|
binary packages with pesde.
|
||||||
|
|
||||||
|
## Using a binary package
|
||||||
|
|
||||||
|
### With `pesde x`
|
||||||
|
|
||||||
|
The `pesde x` command can be used to run a one-off binary package. This is
|
||||||
|
useful for running a binary package without installing it or outside of a pesde
|
||||||
|
project.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde x pesde/hello
|
||||||
|
# Hello, pesde! (pesde/hello@1.0.0, lune)
|
||||||
|
```
|
||||||
|
|
||||||
|
### By installing
|
||||||
|
|
||||||
|
Binary packages can be installed using the `pesde add` and `pesde install`
|
||||||
|
commands.
|
||||||
|
|
||||||
|
This requires a `pesde.toml` file to be present in the current directory, and
|
||||||
|
will add the binary package to the `dependencies` section of the file.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde add pesde/hello
|
||||||
|
pesde install
|
||||||
|
```
|
||||||
|
|
||||||
|
This will add the binary package to your `PATH`, meaning that it can be run
|
||||||
|
anywhere in a project which has it installed under that alias!
|
||||||
|
|
||||||
|
```sh
|
||||||
|
hello
|
||||||
|
# Hello, pesde! (pesde/hello@1.0.0, lune)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Making a binary package
|
||||||
|
|
||||||
|
To make a binary package you must use a target compatible with binary exports.
|
||||||
|
These currently are `lune` and `luau`.
|
||||||
|
|
||||||
|
Here is an example of a binary package:
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
name = "pesde/hello"
|
||||||
|
version = "1.0.0"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
[target]
|
||||||
|
environment = "lune"
|
||||||
|
bin = "main.luau"
|
||||||
|
```
|
||||||
|
|
||||||
|
The `bin` field specifies the entry point for the binary package. This file
|
||||||
|
will be run when the binary package is executed.
|
||||||
|
|
||||||
|
```luau title="main.luau"
|
||||||
|
print("Hello, pesde!")
|
||||||
|
```
|
||||||
|
|
||||||
|
Binary packages get access to custom variables provided by pesde. You can find
|
||||||
|
them in the `_G` table. These are:
|
||||||
|
|
||||||
|
- `PESDE_ROOT`: The root (where the pesde.toml is located) of where the package is
|
||||||
|
installed. This will be in a temporary directory if the package is run with
|
||||||
|
`pesde x`.
|
170
docs/src/content/docs/guides/dependencies.mdx
Normal file
|
@ -0,0 +1,170 @@
|
||||||
|
---
|
||||||
|
title: Specifying Dependencies
|
||||||
|
description: Learn how to specify dependencies in your pesde project.
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Aside, FileTree, LinkCard } from "@astrojs/starlight/components"
|
||||||
|
|
||||||
|
The `[dependencies]` section of your `pesde.toml` file is where you specify the
|
||||||
|
dependencies of your project.
|
||||||
|
|
||||||
|
pesde supports multiple types of dependencies.
|
||||||
|
|
||||||
|
## pesde Dependencies
|
||||||
|
|
||||||
|
The most common type of dependency are pesde dependencies. These are
|
||||||
|
dependencies on packages published to a [pesde registry](https://pesde.daimond113.com).
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[indices]
|
||||||
|
default = "https://github.com/pesde-pkg/index"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
hello = { name = "pesde/hello", version = "^1.0.0" }
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, we're specifying a dependency on the `pesde/hello` package on
|
||||||
|
the official pesde registry with a version constraint of `^1.0.0`.
|
||||||
|
|
||||||
|
You can also add a dependency by running the following command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde add pesde/hello
|
||||||
|
```
|
||||||
|
|
||||||
|
## Git Dependencies
|
||||||
|
|
||||||
|
Git dependencies are dependencies on packages hosted on a Git repository.
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[dependencies]
|
||||||
|
acme = { repo = "acme/package", rev = "aeff6" }
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, we're specifying a dependency on the package contained within
|
||||||
|
the `acme/package` GitHub repository at the `aeff6` commit.
|
||||||
|
|
||||||
|
You can also use a URL to specify the Git repository and a tag for the revision.
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[dependencies]
|
||||||
|
acme = { repo = "https://git.acme.local/package.git", rev = "v0.1.0" }
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also specify a path if the package is not at the root of the repository.
|
||||||
|
|
||||||
|
<FileTree>
|
||||||
|
|
||||||
|
- acme/package.git
|
||||||
|
- pkgs/
|
||||||
|
- **foo/**
|
||||||
|
- pesde.toml
|
||||||
|
- ...
|
||||||
|
|
||||||
|
</FileTree>
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[dependencies]
|
||||||
|
foo = { repo = "acme/package", rev = "main", path = "pkgs/foo" }
|
||||||
|
```
|
||||||
|
|
||||||
|
The path specified by the Git dependency must either be a valid pesde package or
|
||||||
|
a [Wally][wally] package.
|
||||||
|
|
||||||
|
You can also add a Git dependency by running the following command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# From Git URL
|
||||||
|
pesde add https://git.acme.local/package.git#aeff6
|
||||||
|
|
||||||
|
# From GitHub repository
|
||||||
|
pesde add gh#acme/package#main
|
||||||
|
```
|
||||||
|
|
||||||
|
## Wally Dependencies
|
||||||
|
|
||||||
|
Wally dependencies are dependencies on packages published to a
|
||||||
|
[Wally registry][wally]. Wally is a package manager for Roblox and thus Wally
|
||||||
|
dependencies should only be used in Roblox projects.
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[wally_indices]
|
||||||
|
default = "https://github.com/UpliftGames/wally-index"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
foo = { wally = "acme/package", version = "^1.0.0" }
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, we're specifying a dependency on the `acme/package` package
|
||||||
|
on the official Wally registry with a version constraint of `^1.0.0`.
|
||||||
|
|
||||||
|
<Aside type="note">
|
||||||
|
|
||||||
|
In order to get proper types support for Wally dependencies, you need to have
|
||||||
|
a [`sourcemap_generator` script](/reference/manifest#sourcemap_generator)
|
||||||
|
specified in your `pesde.toml` file.
|
||||||
|
|
||||||
|
</Aside>
|
||||||
|
|
||||||
|
You can also add a Wally dependency by running the following command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde add wally#acme/package
|
||||||
|
```
|
||||||
|
|
||||||
|
[wally]: https://wally.run/
|
||||||
|
|
||||||
|
## Workspace Dependencies
|
||||||
|
|
||||||
|
Packages within a workspace can depend on each other. For example, if `foo`
|
||||||
|
and `bar` are both packages in the same workspace, you can add a dependency to
|
||||||
|
`bar` in the `foo/pesde.toml` file:
|
||||||
|
|
||||||
|
```toml title="foo/pesde.toml"
|
||||||
|
[dependencies]
|
||||||
|
bar = { workspace = "acme/bar", version = "^" }
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also add a workspace dependency by running the following command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde add workspace:acme/bar
|
||||||
|
```
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Workspaces"
|
||||||
|
description="Learn more about using workspaces in pesde."
|
||||||
|
href="/guides/workspaces/"
|
||||||
|
/>
|
||||||
|
|
||||||
|
## Peer Dependencies
|
||||||
|
|
||||||
|
Peer dependencies are dependencies that are not installed automatically when
|
||||||
|
used by another package. They need to be installed by the user of the package.
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[peer_dependencies]
|
||||||
|
foo = { name = "acme/foo", version = "^1.0.0" }
|
||||||
|
```
|
||||||
|
|
||||||
|
You can add a peer dependency by passing `--peer` to the `pesde add` command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde add --peer acme/foo
|
||||||
|
```
|
||||||
|
|
||||||
|
## Dev Dependencies
|
||||||
|
|
||||||
|
Dev dependencies are dependencies that are only used during development. They
|
||||||
|
are not installed when the package is used as a dependency.
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[dev_dependencies]
|
||||||
|
foo = { name = "acme/foo", version = "^1.0.0" }
|
||||||
|
```
|
||||||
|
|
||||||
|
You can add a dev dependency by passing `--dev` to the `pesde add` command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde add --dev acme/foo
|
||||||
|
```
|
79
docs/src/content/docs/guides/overrides.mdx
Normal file
|
@ -0,0 +1,79 @@
|
||||||
|
---
|
||||||
|
title: Overriding Dependencies
|
||||||
|
description: Learn how to override and patch dependencies in pesde.
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Aside } from "@astrojs/starlight/components"
|
||||||
|
|
||||||
|
pesde has several ways to override or patch dependencies in your project.
|
||||||
|
|
||||||
|
## Dependency Overrides
|
||||||
|
|
||||||
|
Dependency overrides allow you to replace a dependency of a dependency with a
|
||||||
|
different version or package.
|
||||||
|
|
||||||
|
Let's say you have a project with the following dependencies:
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[dependencies]
|
||||||
|
foo = { name = "acme/foo", version = "^1.0.0" }
|
||||||
|
```
|
||||||
|
|
||||||
|
But `foo` depends on `bar` 1.0.0, and you want to use `bar` 2.0.0 instead. You
|
||||||
|
can override the `bar` dependency in your `pesde.toml` file:
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[dependencies]
|
||||||
|
foo = { name = "acme/foo", version = "^1.0.0" }
|
||||||
|
|
||||||
|
[overrides]
|
||||||
|
"foo>bar" = { name = "acme/bar", version = "^2.0.0" }
|
||||||
|
```
|
||||||
|
|
||||||
|
Now, when you run `pesde install`, `bar` 2.0.0 will be used instead of 1.0.0.
|
||||||
|
|
||||||
|
You can learn more about the syntax for dependency overrides in the
|
||||||
|
[reference](/reference/manifest#overrides).
|
||||||
|
|
||||||
|
## Patching Dependencies
|
||||||
|
|
||||||
|
Patching allows you to modify the source code of a dependency.
|
||||||
|
|
||||||
|
To patch a dependency, you can use the `pesde patch` and `pesde patch-commit`
|
||||||
|
commands.
|
||||||
|
|
||||||
|
Let's say you have the following dependency in your `pesde.toml` file:
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[target]
|
||||||
|
environment = "luau"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
foo = { name = "acme/foo", version = "^1.0.0" }
|
||||||
|
```
|
||||||
|
|
||||||
|
And you want to patch `foo` to fix a bug. You can run the following command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde patch "acme/foo@1.0.0 luau"
|
||||||
|
|
||||||
|
# done! modify the files in the directory, then run `pesde patch-commit /x/y/z`
|
||||||
|
# to apply.
|
||||||
|
# warning: do not commit these changes
|
||||||
|
# note: the pesde.toml file will be ignored when patching
|
||||||
|
```
|
||||||
|
|
||||||
|
pesde will copy the source code of `foo` to a temporary directory, in this case
|
||||||
|
`/x/y/z`. You can then modify the files in this directory. Once you're done,
|
||||||
|
run `pesde patch-commit /x/y/z` to apply the changes.
|
||||||
|
|
||||||
|
This will create a patch within the `patches` directory of your project, and
|
||||||
|
add an entry to `[patches]`. Then, next time you run `pesde install`, the patch
|
||||||
|
will be applied to the dependency.
|
||||||
|
|
||||||
|
<Aside type="caution">
|
||||||
|
|
||||||
|
Make sure not to commit or stage the changes made in the temporary directory.
|
||||||
|
Otherwise pesde may not be able to create the patch correctly.
|
||||||
|
|
||||||
|
</Aside>
|
136
docs/src/content/docs/guides/publishing.mdx
Normal file
|
@ -0,0 +1,136 @@
|
||||||
|
---
|
||||||
|
title: Publishing Packages
|
||||||
|
description: Learn how to publish packages to the pesde registry.
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Aside, LinkCard } from "@astrojs/starlight/components"
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
Before you can publish a package, you must configure the required fields in your
|
||||||
|
`pesde.toml` file.
|
||||||
|
|
||||||
|
### `includes`
|
||||||
|
|
||||||
|
The `includes` field is a list of globs that should be included in the package.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
includes = [
|
||||||
|
"pesde.toml",
|
||||||
|
"README.md",
|
||||||
|
"LICENSE",
|
||||||
|
"src/**/*.luau",
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
### `target`
|
||||||
|
|
||||||
|
The `target` field defines the environment where the package can be run.
|
||||||
|
|
||||||
|
Here, you must also specify the `lib` and/or `bin` fields to indicate the path
|
||||||
|
of the exported library or binary.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[target]
|
||||||
|
environment = "luau"
|
||||||
|
lib = "init.luau"
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Roblox
|
||||||
|
|
||||||
|
`bin` is not supported in Roblox packages. You must also specify a list of
|
||||||
|
`build_files`. These are the files that should be synced into Roblox. They are
|
||||||
|
passed to the `roblox_sync_config_generator` script.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[target]
|
||||||
|
environment = "roblox"
|
||||||
|
lib = "src/init.luau"
|
||||||
|
build_files = ["src"]
|
||||||
|
```
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Roblox"
|
||||||
|
description="Learn more about authoring packages for Roblox."
|
||||||
|
href="/guides/roblox/#authoring-packages"
|
||||||
|
/>
|
||||||
|
|
||||||
|
## Authentication
|
||||||
|
|
||||||
|
Before you can publish a package, you must authenticate with your GitHub
|
||||||
|
account.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde auth login
|
||||||
|
```
|
||||||
|
|
||||||
|
You will be given a code and prompted to open the GitHub authentication page in
|
||||||
|
your browser. You must enter the code to authenticate.
|
||||||
|
|
||||||
|
## Publishing
|
||||||
|
|
||||||
|
To publish a package, run the following command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde publish
|
||||||
|
```
|
||||||
|
|
||||||
|
You will be prompted to confirm the package details before publishing.
|
||||||
|
|
||||||
|
Once a package is published, others will be able to install it. You may not
|
||||||
|
remove a package once it has been published. You may not publish a package with
|
||||||
|
an already existing version.
|
||||||
|
|
||||||
|
## Multi-target Packages
|
||||||
|
|
||||||
|
You may publish packages under the same name and version but with different
|
||||||
|
targets. This allows you to publish a package that can be used in multiple
|
||||||
|
environments.
|
||||||
|
|
||||||
|
For example, you may publish a package that can be used in both Roblox and
|
||||||
|
Luau environments by publishing two versions of the package, one for each
|
||||||
|
environment.
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
The `README.md` file in the root of the package will be displayed on the
|
||||||
|
[pesde registry website](https://pesde.daimond113.com/).
|
||||||
|
|
||||||
|
You can include a `docs` directory in the package containing markdown files
|
||||||
|
and they will be available on the pesde registry website. You can see an example
|
||||||
|
in [`pesde/hello`](https://pesde.daimond113.com/packages/pesde/hello/latest/any/docs).
|
||||||
|
|
||||||
|
### Customizing the sidebar
|
||||||
|
|
||||||
|
You can include frontmatter with a `sidebar_position` to customize the order
|
||||||
|
of the pages on the sidebar.
|
||||||
|
|
||||||
|
```md title="docs/getting-started.md"
|
||||||
|
---
|
||||||
|
sidebar_position: 2
|
||||||
|
---
|
||||||
|
|
||||||
|
# Getting Started
|
||||||
|
|
||||||
|
Lorem ipsum odor amet, consectetuer adipiscing elit. Eleifend consectetur id
|
||||||
|
consequat conubia fames curae?
|
||||||
|
```
|
||||||
|
|
||||||
|
You can have directories in the `docs` directory to create nested pages. These
|
||||||
|
will show up as collapsible sections in the sidebar. You can include a
|
||||||
|
`_category_.json` file inside the nested directories to customize the label and
|
||||||
|
the ordering in the sidebar.
|
||||||
|
|
||||||
|
```json title="docs/guides/_category_.json"
|
||||||
|
{
|
||||||
|
"label": "Guides",
|
||||||
|
"position": 3
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<Aside type="tip">
|
||||||
|
|
||||||
|
Make sure to include `docs` inside the `includes` field in `pesde.toml`
|
||||||
|
otherwise they won't be published with your package.
|
||||||
|
|
||||||
|
</Aside>
|
229
docs/src/content/docs/guides/roblox.mdx
Normal file
|
@ -0,0 +1,229 @@
|
||||||
|
---
|
||||||
|
title: Roblox
|
||||||
|
description: Using pesde in a Roblox project.
|
||||||
|
---
|
||||||
|
|
||||||
|
import { FileTree } from "@astrojs/starlight/components"
|
||||||
|
|
||||||
|
pesde can be used in Roblox projects, however this requires some extra setup.
|
||||||
|
Namely, you need to specify a `roblox_sync_config_generator` script in order
|
||||||
|
to generate the adequate configuration for the sync tool you are using.
|
||||||
|
|
||||||
|
The [`pesde-scripts`](https://github.com/pesde-pkg/scripts)
|
||||||
|
repository contains a list of scripts for different sync tools. If the tool
|
||||||
|
you are using is not supported, you can write your own script and submit a PR
|
||||||
|
to get it added.
|
||||||
|
|
||||||
|
## Usage with Rojo
|
||||||
|
|
||||||
|
[Rojo](https://rojo.space/) is a popular tool for syncing files into Roblox
|
||||||
|
Studio.
|
||||||
|
|
||||||
|
Running `pesde init` will prompt you to select a target, select
|
||||||
|
`roblox` or `roblox_server` in this case. You will be prompted to pick out a
|
||||||
|
scripts package. Select `pesde/scripts_rojo` to get started with Rojo.
|
||||||
|
|
||||||
|
## Usage with other tools
|
||||||
|
|
||||||
|
If you are using a different sync tool, you should look for its scripts
|
||||||
|
package on the registry. If you cannot find it, you can write your own and
|
||||||
|
optionally submit a PR to pesde-scripts to help others using the same tool as
|
||||||
|
you get started quicker.
|
||||||
|
|
||||||
|
Scaffold your project with `pesde init`, select the `roblox` or `roblox_server`
|
||||||
|
target, and then create a `.pesde/roblox_sync_config_generator.luau` script
|
||||||
|
and put its path in the manifest.
|
||||||
|
|
||||||
|
## Authoring packages
|
||||||
|
|
||||||
|
When authoring packages for Roblox, it is recommended to have your code inside
|
||||||
|
of a `src` directory (or any other directory you prefer).
|
||||||
|
|
||||||
|
Inside of your `pesde.toml` you must specify the `roblox` environment and the
|
||||||
|
`lib` field with the path to your main script. You must also specify a list of
|
||||||
|
`build_files`. This list should contain names of top level files or directories
|
||||||
|
that should be synced into Roblox by a sync tool, such as Rojo.
|
||||||
|
|
||||||
|
Let's say you have a package with the following structure:
|
||||||
|
|
||||||
|
<FileTree>
|
||||||
|
|
||||||
|
- roblox_packages/
|
||||||
|
- dependency.luau
|
||||||
|
- ...
|
||||||
|
- src/
|
||||||
|
- init.luau
|
||||||
|
- foo.luau
|
||||||
|
- bar.luau
|
||||||
|
- ...
|
||||||
|
- LICENSE
|
||||||
|
- pesde.toml
|
||||||
|
- README.md
|
||||||
|
- selene.toml
|
||||||
|
- stylua.toml
|
||||||
|
|
||||||
|
</FileTree>
|
||||||
|
|
||||||
|
There are lots of files in the root directory that are not needed in Roblox,
|
||||||
|
such as configuration files, READMEs, and licenses. We only want the `src` and
|
||||||
|
the `roblox_packages` directory to be synced into Roblox.
|
||||||
|
|
||||||
|
<FileTree>
|
||||||
|
|
||||||
|
- roblox_packages/
|
||||||
|
- dependency (roblox_packages/dependency.luau)
|
||||||
|
- ...
|
||||||
|
- src/ (src/init.luau)
|
||||||
|
- foo (src/foo.luau)
|
||||||
|
- bar (src/bar.luau)
|
||||||
|
- ...
|
||||||
|
|
||||||
|
</FileTree>
|
||||||
|
|
||||||
|
This is where `build_files` come in, we can specify a list of files that should
|
||||||
|
be synced into Roblox. In this case, we only want the `src` directory to be
|
||||||
|
synced. We do not need to specify the `roblox_packages` directory, as it is
|
||||||
|
always synced.
|
||||||
|
|
||||||
|
So for our package, the `pesde.toml` file would roughly look like this:
|
||||||
|
|
||||||
|
```toml title="pesde.toml" {15}
|
||||||
|
name = "acme/package"
|
||||||
|
version = "1.0.0"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
includes = [
|
||||||
|
"pesde.toml",
|
||||||
|
"LICENSE",
|
||||||
|
"README.md",
|
||||||
|
"src/**/*.luau",
|
||||||
|
]
|
||||||
|
|
||||||
|
[target]
|
||||||
|
environment = "roblox"
|
||||||
|
lib = "src/init.luau"
|
||||||
|
build_files = ["src"]
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
dependency = "acme/library"
|
||||||
|
```
|
||||||
|
|
||||||
|
When a consumer of your package installs it, the `roblox_sync_config_generator`
|
||||||
|
script they are using will generate the configuration needed for their sync
|
||||||
|
tool. For example, a Rojo user would get a `default.project.json` with the
|
||||||
|
following contents:
|
||||||
|
|
||||||
|
```json title="default.project.json"
|
||||||
|
{
|
||||||
|
"tree": {
|
||||||
|
"src": {
|
||||||
|
"$path": "src"
|
||||||
|
},
|
||||||
|
"roblox_packages": {
|
||||||
|
"$path": "roblox_packages"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The linker scripts that pesde generates will then point to the `src` module.
|
||||||
|
|
||||||
|
Then, to publish your package, you can follow the steps in the
|
||||||
|
["Publishing Packages"](/guides/publishing/) guide.
|
||||||
|
|
||||||
|
### Test place with Rojo
|
||||||
|
|
||||||
|
You might want to create a "test place" where you can test your package inside
|
||||||
|
Roblox, or to get proper LSP support when developing your package.
|
||||||
|
|
||||||
|
To do this, you can create a `test-place.project.json` file which includes your
|
||||||
|
package and the `roblox_packages` directory.
|
||||||
|
|
||||||
|
```json title="test-place.project.json"
|
||||||
|
{
|
||||||
|
"tree": {
|
||||||
|
"$className": "DataModel",
|
||||||
|
"ReplicatedStorage": {
|
||||||
|
"package": {
|
||||||
|
"$className": "Folder",
|
||||||
|
"src": {
|
||||||
|
"$path": "src"
|
||||||
|
},
|
||||||
|
"roblox_packages": {
|
||||||
|
"$path": "roblox_packages"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You can then run `rojo serve` with this project file:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
rojo serve test-place.project.json
|
||||||
|
```
|
||||||
|
|
||||||
|
If you are using [Luau LSP](https://github.com/JohnnyMorganz/luau-lsp) you can
|
||||||
|
change the `luau-lsp.sourcemap.rojoProjectFile` extension setting to
|
||||||
|
`test-place.project.json` to get proper LSP support when developing your
|
||||||
|
package.
|
||||||
|
|
||||||
|
### Differences from Wally
|
||||||
|
|
||||||
|
Those coming from [Wally](https://wally.run/) may be a bit confused by the
|
||||||
|
way pesde handles Roblox packages.
|
||||||
|
|
||||||
|
In Wally, it is standard to have a `default.project.json` with the following:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tree": {
|
||||||
|
"$path": "src"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This will cause the `src` directory to be directly synced into Roblox.
|
||||||
|
|
||||||
|
In pesde, you should not have a `default.project.json` file in your package.
|
||||||
|
Instead, you are required to use the `build_files` field to specify a 1:1 match
|
||||||
|
between Roblox and the file system. pesde forbids `default.project.json` to be
|
||||||
|
part of a published package, and regenerates it when installing a pesde git
|
||||||
|
dependency. This allows the consumer of your package to choose the sync tool
|
||||||
|
they want to use, instead of being constrained to only using Rojo.
|
||||||
|
|
||||||
|
This has the effect that the structure of the files in the file system ends up
|
||||||
|
being reflected inside Roblox.
|
||||||
|
|
||||||
|
With Wally, the structure that ends up in Roblox ends up looking like this:
|
||||||
|
|
||||||
|
<FileTree>
|
||||||
|
|
||||||
|
- Packages/
|
||||||
|
- \_Index/
|
||||||
|
- acme_package@1.0.0/
|
||||||
|
- package/ (src/init.luau)
|
||||||
|
- foo (src/foo.luau)
|
||||||
|
- bar (src/bar.luau)
|
||||||
|
- ...
|
||||||
|
- dependency
|
||||||
|
|
||||||
|
</FileTree>
|
||||||
|
|
||||||
|
Whereas with pesde, it looks like this:
|
||||||
|
|
||||||
|
<FileTree>
|
||||||
|
|
||||||
|
- roblox_packages/
|
||||||
|
- .pesde/
|
||||||
|
- acme+package/
|
||||||
|
- 1.0.0/
|
||||||
|
- src/ (src/init.luau)
|
||||||
|
- foo (src/foo.luau)
|
||||||
|
- bar (src/bar.luau)
|
||||||
|
- ...
|
||||||
|
- roblox_packages/
|
||||||
|
- dependency (roblox_packages/dependency.luau)
|
||||||
|
|
||||||
|
</FileTree>
|
53
docs/src/content/docs/guides/scripts-packages.mdx
Normal file
|
@ -0,0 +1,53 @@
|
||||||
|
---
|
||||||
|
title: Using Scripts Packages
|
||||||
|
description: Learn how to use scripts packages.
|
||||||
|
---
|
||||||
|
|
||||||
|
A **scripts package** is a package that contains scripts. The scripts provided
|
||||||
|
by the package are linked in `.pesde/{alias}/{script_name}.luau` of the project
|
||||||
|
that uses the package.
|
||||||
|
|
||||||
|
## Using a scripts package
|
||||||
|
|
||||||
|
Scripts packages can be installed using the `pesde add` and `pesde install`
|
||||||
|
commands.
|
||||||
|
|
||||||
|
This requires a `pesde.toml` file to be present in the current directory, and
|
||||||
|
will add the scripts package to the `dependencies` section of the file.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde add pesde/scripts_rojo
|
||||||
|
pesde install
|
||||||
|
```
|
||||||
|
|
||||||
|
This will add the scripts package to your project, and installing will put the
|
||||||
|
scripts at `.pesde/scripts_rojo/{script_name}.luau`. You can then add the scripts
|
||||||
|
to your manifest, for example:
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[scripts]
|
||||||
|
roblox_sync_config_generator = ".pesde/scripts_rojo/roblox_sync_config_generator.luau"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Making a scripts package
|
||||||
|
|
||||||
|
To make a scripts package you must use a target compatible with scripts exports.
|
||||||
|
These currently are `lune` and `luau`.
|
||||||
|
|
||||||
|
Here is an example of a scripts package:
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
name = "pesde/scripts_rojo"
|
||||||
|
version = "1.0.0"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
[target]
|
||||||
|
environment = "lune"
|
||||||
|
|
||||||
|
[target.scripts]
|
||||||
|
roblox_sync_config_generator = "roblox_sync_config_generator.luau"
|
||||||
|
```
|
||||||
|
|
||||||
|
The `scripts` table in the target is a map of script names to the path of the
|
||||||
|
script in the package. The scripts will be linked in the project that uses the
|
||||||
|
package at `.pesde/{alias}/{script_name}.luau`.
|
222
docs/src/content/docs/guides/self-hosting-registries.mdx
Normal file
|
@ -0,0 +1,222 @@
|
||||||
|
---
|
||||||
|
title: Self Hosting Registries
|
||||||
|
description: Learn how to self host registries for pesde.
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Aside } from "@astrojs/starlight/components"
|
||||||
|
|
||||||
|
You can self host registries for pesde. This is useful if you want a private
|
||||||
|
registry or if you want a separate registry for other reasons.
|
||||||
|
|
||||||
|
## Making the index repository
|
||||||
|
|
||||||
|
The index is a repository that contains metadata about all the packages in the
|
||||||
|
registry.
|
||||||
|
|
||||||
|
An index contains a `config.toml` file with configuration options.
|
||||||
|
|
||||||
|
To create an index, create a new repository and add a `config.toml` file with
|
||||||
|
the following content:
|
||||||
|
|
||||||
|
```toml title="config.toml"
|
||||||
|
# the URL of the registry API
|
||||||
|
api = "https://registry.acme.local/"
|
||||||
|
|
||||||
|
# package download URL (optional)
|
||||||
|
download = "{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}"
|
||||||
|
|
||||||
|
# the client ID of the GitHub OAuth app (optional)
|
||||||
|
github_oauth_client_id = "a1d648966fdfbdcd9295"
|
||||||
|
|
||||||
|
# whether to allow packages with Git dependencies (default: false)
|
||||||
|
git_allowed = true
|
||||||
|
|
||||||
|
# whether to allow packages which depend on packages from other registries
|
||||||
|
# (default: false)
|
||||||
|
other_registries_allowed = ["https://git.acme.local/index"]
|
||||||
|
|
||||||
|
# whether to allow packages with Wally dependencies (default: false)
|
||||||
|
wally_allowed = false
|
||||||
|
|
||||||
|
# the maximum size of the archive in bytes (default: 4MB)
|
||||||
|
max_archive_size = 4194304
|
||||||
|
|
||||||
|
# the scripts packages present in the `init` command selection by default
|
||||||
|
scripts_packages = ["pesde/scripts_rojo"]
|
||||||
|
```
|
||||||
|
|
||||||
|
- **api**: The URL of the registry API. See below for more information.
|
||||||
|
|
||||||
|
- **download**: The URL to download packages from. This is optional and
|
||||||
|
defaults to the correct URL for the official pesde registry implementation.
|
||||||
|
You only need this if you are using a custom registry implementation.
|
||||||
|
|
||||||
|
This string can contain the following placeholders:
|
||||||
|
|
||||||
|
- `{API_URL}`: The API URL (as specified in the `api` field).
|
||||||
|
- `{PACKAGE}`: The package name.
|
||||||
|
- `{PACKAGE_VERSION}`: The package version.
|
||||||
|
- `{PACKAGE_TARGET}`: The package target.
|
||||||
|
|
||||||
|
Defaults to `{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}`.
|
||||||
|
|
||||||
|
- **github_oauth_client_id**: This is required if you use GitHub OAuth for
|
||||||
|
authentication. See below for more information.
|
||||||
|
|
||||||
|
- **git_allowed**: Whether to allow packages with Git dependencies. This can be
|
||||||
|
either a bool or a list of allowed repository URLs. This is optional and
|
||||||
|
defaults to `false`.
|
||||||
|
|
||||||
|
- **other_registries_allowed**: Whether to allow packages which depend on
|
||||||
|
packages from other registries. This can be either a bool or a list of
|
||||||
|
allowed index repository URLs. This is optional and defaults to `false`.
|
||||||
|
|
||||||
|
- **wally_allowed**: Whether to allow packages with Wally dependencies. This can
|
||||||
|
be either a bool or a list of allowed index repository URLs. This is
|
||||||
|
optional and defaults to `false`.
|
||||||
|
|
||||||
|
- **max_archive_size**: The maximum size of the archive in bytes. This is
|
||||||
|
optional and defaults to `4194304` (4MB).
|
||||||
|
|
||||||
|
- **scripts_packages**: The scripts packages present in the `init` command
|
||||||
|
selection by default. This is optional and defaults to none.
|
||||||
|
|
||||||
|
You should then push this repository to [GitHub](https://github.com/).
|
||||||
|
|
||||||
|
## Configuring the registry
|
||||||
|
|
||||||
|
The registry is a web server that provides package downloads and the ability to
|
||||||
|
publish packages.
|
||||||
|
|
||||||
|
The official registry implementation is available in the
|
||||||
|
[pesde GitHub repository](https://github.com/pesde-pkg/pesde/tree/0.5/registry).
|
||||||
|
|
||||||
|
Configuring the registry is done using environment variables. In order to allow
|
||||||
|
the registry to access the index repository, you must use an account that
|
||||||
|
has access to the index repository. We recommend using a separate account
|
||||||
|
for this purpose.
|
||||||
|
|
||||||
|
<Aside>
|
||||||
|
For a GitHub account the password **must** be a personal access token. For instructions on how to
|
||||||
|
create a personal access token, see the [GitHub
|
||||||
|
documentation](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens).
|
||||||
|
The access token must have read and write access to the index repository.
|
||||||
|
</Aside>
|
||||||
|
|
||||||
|
### General configuration
|
||||||
|
|
||||||
|
- **INDEX_REPO_URL**: The URL of the index repository. This is required.\
|
||||||
|
Example: `https://github.com/pesde-pkg/index.git`
|
||||||
|
|
||||||
|
- **GIT_USERNAME**: The username of a Git account that has push access to the
|
||||||
|
index repository. This is required.
|
||||||
|
|
||||||
|
- **GIT_PASSWORD**: The password of the account specified by
|
||||||
|
`GIT_USERNAME`. This is required.
|
||||||
|
|
||||||
|
- **COMMITTER_GIT_NAME**: The name to use for the committer when updating the
|
||||||
|
index repository.\
|
||||||
|
Example: `pesde index updater`
|
||||||
|
|
||||||
|
- **COMMITTER_GIT_EMAIL**: The email to use for the committer when updating the
|
||||||
|
index repository.\
|
||||||
|
Example: `pesde@localhost`
|
||||||
|
|
||||||
|
- **DATA_DIR**: The directory where the registry stores miscellaneous data.
|
||||||
|
This value can use `{CWD}` to refer to the current working directory.\
|
||||||
|
Default: `{CWD}/data`
|
||||||
|
|
||||||
|
- **ADDRESS**: The address to bind the server to.\
|
||||||
|
Default: `127.0.0.1`
|
||||||
|
|
||||||
|
- **PORT**: The port to bind the server to.\
|
||||||
|
Default: `8080`
|
||||||
|
|
||||||
|
### Authentication configuration
|
||||||
|
|
||||||
|
The registry supports multiple authentication methods, which are documented
|
||||||
|
below.
|
||||||
|
|
||||||
|
#### General configuration
|
||||||
|
|
||||||
|
- **READ_NEEDS_AUTH**: If set to any value, reading data requires
|
||||||
|
authentication. If not set, anyone can read from the registry.
|
||||||
|
This is optional.
|
||||||
|
|
||||||
|
#### Single token authentication
|
||||||
|
|
||||||
|
Allows read and write access to the registry using a single token.
|
||||||
|
|
||||||
|
- **ACCESS_TOKEN**: The token to use for authentication.
|
||||||
|
|
||||||
|
#### Multiple token authentication
|
||||||
|
|
||||||
|
Allows read and write access to the registry using different tokens.
|
||||||
|
|
||||||
|
- **READ_ACCESS_TOKEN**: The token that grants read access.
|
||||||
|
- **WRITE_ACCESS_TOKEN**: The token that grants write access.
|
||||||
|
|
||||||
|
#### GitHub OAuth authentication
|
||||||
|
|
||||||
|
Allows clients to get read and write access to the registry using GitHub OAuth.
|
||||||
|
This requires a GitHub OAuth app, instructions to create one can be found
|
||||||
|
in the [GitHub documentation](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/creating-an-oauth-app).
|
||||||
|
|
||||||
|
- **GITHUB_CLIENT_SECRET**: The client secret of the GitHub OAuth app.
|
||||||
|
|
||||||
|
#### No authentication
|
||||||
|
|
||||||
|
If none of the above variables are set, **anyone** will be able to read and
|
||||||
|
write to the registry.
|
||||||
|
|
||||||
|
### Storage configuration
|
||||||
|
|
||||||
|
The registry supports multiple storage backends, which are documented below.
|
||||||
|
|
||||||
|
#### File system storage
|
||||||
|
|
||||||
|
Stores packages on the file system.
|
||||||
|
|
||||||
|
- **FS_STORAGE_ROOT**: The root directory where packages are stored.
|
||||||
|
|
||||||
|
#### S3 storage
|
||||||
|
|
||||||
|
Stores packages on an S3 compatible storage service, such as
|
||||||
|
[Amazon S3](https://aws.amazon.com/s3/) or
|
||||||
|
[Cloudflare R2](https://www.cloudflare.com/r2/).
|
||||||
|
|
||||||
|
- **S3_ENDPOINT**: The endpoint of the S3 bucket to store packages in.
|
||||||
|
- **S3_BUCKET_NAME**: The name of the bucket.
|
||||||
|
- **S3_REGION**: The region of the bucket.
|
||||||
|
- **S3_ACCESS_KEY**: The access key to use.
|
||||||
|
- **S3_SECRET_KEY**: The secret key to use.
|
||||||
|
|
||||||
|
### Sentry configuration
|
||||||
|
|
||||||
|
The registry supports [Sentry](https://sentry.io/) for error tracking.
|
||||||
|
|
||||||
|
- **SENTRY_DSN**: The DSN of the Sentry instance.
|
||||||
|
|
||||||
|
## Running the registry
|
||||||
|
|
||||||
|
First clone the repository and navigate to the repository directory:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
git clone https://github.com/pesde-pkg/pesde.git
|
||||||
|
cd pesde
|
||||||
|
```
|
||||||
|
|
||||||
|
You can then build the registry using the following command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
cargo build --release -p pesde-registry
|
||||||
|
```
|
||||||
|
|
||||||
|
This will build the registry. The resulting binary will be located at
|
||||||
|
`target/release/pesde-registry` or `target/release/pesde-registry.exe`.
|
||||||
|
|
||||||
|
After setting the environment variables, you can run the registry
|
||||||
|
by executing the binary.
|
||||||
|
|
||||||
|
The registry must be exposed at the URL specified in the `api` field of the
|
||||||
|
index repository configuration.
|
100
docs/src/content/docs/guides/workspaces.mdx
Normal file
|
@ -0,0 +1,100 @@
|
||||||
|
---
|
||||||
|
title: Workspaces
|
||||||
|
description: Learn how to use workspaces in pesde.
|
||||||
|
---
|
||||||
|
|
||||||
|
import { FileTree, LinkCard } from "@astrojs/starlight/components"
|
||||||
|
|
||||||
|
Workspaces allow you to work with multiple pesde projects within a single
|
||||||
|
repository. Packages within a workspace can depend on each other. And you can
|
||||||
|
run commands like install or publish on every package in the workspace at once.
|
||||||
|
|
||||||
|
Let's say you have a repository with the following structure:
|
||||||
|
|
||||||
|
<FileTree>
|
||||||
|
|
||||||
|
- pesde.toml
|
||||||
|
- pkgs/
|
||||||
|
- foo/
|
||||||
|
- pesde.toml
|
||||||
|
- ...
|
||||||
|
- bar/
|
||||||
|
- pesde.toml
|
||||||
|
- ...
|
||||||
|
|
||||||
|
</FileTree>
|
||||||
|
|
||||||
|
Within the root `pesde.toml` file, we can define a workspace:
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
name = "acme/root"
|
||||||
|
version = "0.0.0"
|
||||||
|
private = true
|
||||||
|
|
||||||
|
workspace_members = ["pkgs/*"]
|
||||||
|
|
||||||
|
[target]
|
||||||
|
environment = "luau"
|
||||||
|
```
|
||||||
|
|
||||||
|
Now, each folder within the `pkgs/` directory is considered a package in the
|
||||||
|
workspace. You can run commands like `pesde install` or `pesde publish` from
|
||||||
|
the root of the repository to run them on every package in the workspace.
|
||||||
|
|
||||||
|
## Workspace Dependencies
|
||||||
|
|
||||||
|
Packages within a workspace can depend on each other. For example, if `foo`
|
||||||
|
depends on `bar`, you can add a dependency to `bar` in the `foo/pesde.toml` file:
|
||||||
|
|
||||||
|
```toml title="pkgs/foo/pesde.toml"
|
||||||
|
name = "acme/foo"
|
||||||
|
version = "1.0.0"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
bar = { workspace = "acme/bar", version = "^" }
|
||||||
|
```
|
||||||
|
|
||||||
|
Workspace dependencies are replaced with normal pesde dependencies when
|
||||||
|
publishing.
|
||||||
|
|
||||||
|
The `version` field can either contain `^`, `*`, `=`, `~`, or a specific version
|
||||||
|
requirement, such as `^1.0.0`. If you use `^`, `=`, or `~`, it will be replaced
|
||||||
|
with the version of the package in the workspace when publishing.
|
||||||
|
|
||||||
|
For example, if you had the following:
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[dependencies]
|
||||||
|
bar = { workspace = "acme/bar", version = "^" }
|
||||||
|
qux = { workspace = "acme/qux", version = "=" }
|
||||||
|
qar = { workspace = "acme/qar", version = "~" }
|
||||||
|
zoo = { workspace = "acme/zoo", version = "^2.1.0" }
|
||||||
|
baz = { workspace = "acme/baz", version = "*" }
|
||||||
|
```
|
||||||
|
|
||||||
|
If `bar`, `baz`, `qux`, `qar`, and `zoo` are all at version `2.1.5` in the
|
||||||
|
workspace, the `pesde.toml` file will be transformed into the following when
|
||||||
|
publishing.
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[dependencies]
|
||||||
|
bar = { name = "acme/bar", version = "^2.1.5" }
|
||||||
|
qux = { name = "acme/qux", version = "=2.1.5" }
|
||||||
|
qar = { name = "acme/qar", version = "~2.1.5" }
|
||||||
|
zoo = { name = "acme/zoo", version = "^2.1.0" }
|
||||||
|
baz = { name = "acme/baz", version = "*" }
|
||||||
|
```
|
||||||
|
|
||||||
|
A `target` field can be added to the `dependencies` table to specify a target
|
||||||
|
environment for the dependency.
|
||||||
|
|
||||||
|
```toml title="pesde.toml"
|
||||||
|
[dependencies]
|
||||||
|
bar = { workspace = "acme/bar", version = "^", target = "luau" }
|
||||||
|
```
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Specifying Dependencies"
|
||||||
|
description="Learn more about specifying dependencies in pesde."
|
||||||
|
href="/guides/dependencies/"
|
||||||
|
/>
|
32
docs/src/content/docs/index.mdx
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
---
|
||||||
|
title: What is pesde?
|
||||||
|
description: A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune.
|
||||||
|
---
|
||||||
|
|
||||||
|
pesde is a package manager for the Luau programming language.
|
||||||
|
|
||||||
|
## Why use pesde?
|
||||||
|
|
||||||
|
When you write code, you often want to use libraries or frameworks that others
|
||||||
|
have written. Manually downloading and managing these can be cumbersome.
|
||||||
|
|
||||||
|
These libraries or frameworks can be distributed as packages. You can then
|
||||||
|
easily install and use these packages using pesde. pesde will automatically
|
||||||
|
download and manage the packages, and their dependencies, for you.
|
||||||
|
|
||||||
|
## Multi-target support
|
||||||
|
|
||||||
|
Luau can run in a lot of different places, such as on [Roblox][roblox], or in
|
||||||
|
[Lune][lune].
|
||||||
|
|
||||||
|
pesde is designed to work with all of these runtimes. Packages can publish
|
||||||
|
multiple versions of themselves, each tailored to a specific runtime.
|
||||||
|
|
||||||
|
[registry]: https://pesde.daimond113.com/
|
||||||
|
[roblox]: https://www.roblox.com/
|
||||||
|
[lune]: https://lune-org.github.io/docs
|
||||||
|
|
||||||
|
## The pesde registry
|
||||||
|
|
||||||
|
The [pesde registry][registry] is where anyone can publish their packages for
|
||||||
|
others to use.
|
99
docs/src/content/docs/installation.mdx
Normal file
|
@ -0,0 +1,99 @@
|
||||||
|
---
|
||||||
|
title: Installation
|
||||||
|
description: Install pesde
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Aside, Steps, TabItem, Tabs } from "@astrojs/starlight/components"
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
pesde requires [Lune](https://lune-org.github.io/docs) to be installed on your
|
||||||
|
system in order to function properly.
|
||||||
|
|
||||||
|
You can follow the installation instructions in the
|
||||||
|
[Lune documentation](https://lune-org.github.io/docs/getting-started/1-installation).
|
||||||
|
|
||||||
|
## Installing pesde
|
||||||
|
|
||||||
|
<Steps>
|
||||||
|
|
||||||
|
1. Go to the [GitHub releases page](https://github.com/pesde-pkg/pesde/releases/latest).
|
||||||
|
|
||||||
|
2. Download the corresponding archive for your operating system. You can choose
|
||||||
|
whether to use the `.zip` or `.tar.gz` files.
|
||||||
|
|
||||||
|
3. Extract the downloaded archive to a folder on your computer.
|
||||||
|
|
||||||
|
4. Open a terminal and locate the path of the extracted `pesde` binary.
|
||||||
|
|
||||||
|
<Tabs syncKey="os">
|
||||||
|
<TabItem label="Windows">
|
||||||
|
|
||||||
|
If you extracted the archive to `C:\Users\User\Downloads`, the path to the
|
||||||
|
`pesde` binary would be `C:\Users\User\Downloads\pesde.exe`.
|
||||||
|
|
||||||
|
You can then run the `self-install` command:
|
||||||
|
|
||||||
|
```ps
|
||||||
|
C:\Users\User\Downloads\pesde.exe self-install
|
||||||
|
```
|
||||||
|
|
||||||
|
pesde should now be installed on your system. You may need to restart your
|
||||||
|
computer for the changes to take effect.
|
||||||
|
|
||||||
|
<Aside type="caution">
|
||||||
|
pesde uses symlinks which are an administrator-level operation on Windows.
|
||||||
|
To ensure proper functionality, enable [Developer Mode](https://learn.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development).
|
||||||
|
|
||||||
|
|
||||||
|
If you are getting errors such as `Failed to symlink file, a required
|
||||||
|
privilege is not held by the client`, then enabling this setting will fix
|
||||||
|
them.
|
||||||
|
</Aside>
|
||||||
|
|
||||||
|
</TabItem>
|
||||||
|
<TabItem label="Linux & macOS">
|
||||||
|
|
||||||
|
If you extracted the archive to `~/Downloads`, the path to the `pesde`
|
||||||
|
binary would be `~/Downloads/pesde`.
|
||||||
|
|
||||||
|
You must then add execute permissions and run the `self-install` command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
chmod +x ~/Downloads/pesde
|
||||||
|
~/Downloads/pesde self-install
|
||||||
|
```
|
||||||
|
|
||||||
|
pesde should now be installed on your system. You will need to update your
|
||||||
|
shell configuration file to add the pesde binary to your `PATH`
|
||||||
|
environment variable.
|
||||||
|
|
||||||
|
```sh title=".zshrc"
|
||||||
|
export PATH="$PATH:$HOME/.pesde/bin"
|
||||||
|
```
|
||||||
|
|
||||||
|
You should then be able to run `pesde` after restarting your shell.
|
||||||
|
|
||||||
|
</TabItem>
|
||||||
|
</Tabs>
|
||||||
|
|
||||||
|
5. Verify that pesde is installed by running the following command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde -v
|
||||||
|
```
|
||||||
|
|
||||||
|
This command should output the version of pesde that you installed.
|
||||||
|
|
||||||
|
</Steps>
|
||||||
|
|
||||||
|
<Aside type="caution">
|
||||||
|
|
||||||
|
It is not recommended to use toolchain managers (such as Rokit or Aftman) to
|
||||||
|
install pesde. You can use `pesde self-upgrade` if you need to update pesde.
|
||||||
|
|
||||||
|
If you need everyone to use the same version of pesde, you can use the
|
||||||
|
`pesde_version` field in `pesde.toml` to specify the version of pesde to use
|
||||||
|
for the current project.
|
||||||
|
|
||||||
|
</Aside>
|
142
docs/src/content/docs/quickstart.mdx
Normal file
|
@ -0,0 +1,142 @@
|
||||||
|
---
|
||||||
|
title: Quickstart
|
||||||
|
description: Start using pesde
|
||||||
|
---
|
||||||
|
|
||||||
|
import { FileTree } from "@astrojs/starlight/components"
|
||||||
|
|
||||||
|
Let's make a simple Luau program that uses the `pesde/hello` package to print
|
||||||
|
hello to the terminal.
|
||||||
|
|
||||||
|
## Scaffolding the project
|
||||||
|
|
||||||
|
In your terminal, run the following commands to create a folder and navigate
|
||||||
|
into it.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
mkdir hello-pesde
|
||||||
|
cd hello-pesde
|
||||||
|
```
|
||||||
|
|
||||||
|
Then, we'll use `pesde init` to scaffold a new pesde project. The command will
|
||||||
|
ask you a few questions to set up the project. Our project will be named
|
||||||
|
`<username>/hello_pesde`, replace `<username>` with a username of your choice.
|
||||||
|
The name may only contain lowercase letters, numbers, and underscores. The
|
||||||
|
environment we're targeting is `luau`.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde init
|
||||||
|
|
||||||
|
# what is the name of the project? <username>/hello_pesde
|
||||||
|
# what is the description of the project?
|
||||||
|
# who are the authors of this project?
|
||||||
|
# what is the repository URL of this project?
|
||||||
|
# what is the license of this project? MIT
|
||||||
|
# what environment are you targeting for your package? luau
|
||||||
|
# would you like to setup default Roblox compatibility scripts? No
|
||||||
|
```
|
||||||
|
|
||||||
|
The command will create a `pesde.toml` file in the current folder. Go ahead
|
||||||
|
and open this file in your text editor of choice.
|
||||||
|
|
||||||
|
## Adding a main script
|
||||||
|
|
||||||
|
Under the `[target]` section, we're going to add a `bin` field to specify
|
||||||
|
the path to the main script of our package.
|
||||||
|
|
||||||
|
```diff lang="toml" title="pesde.toml"
|
||||||
|
name = "<username>/hello_pesde"
|
||||||
|
version = "0.1.0"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
[target]
|
||||||
|
environment = "luau"
|
||||||
|
+ bin = "main.luau"
|
||||||
|
|
||||||
|
[indices]
|
||||||
|
default = "https://github.com/pesde-pkg/index"
|
||||||
|
```
|
||||||
|
|
||||||
|
Don't forget to save the file after making the changes.
|
||||||
|
|
||||||
|
Now, let's create a `main.luau` file in the project folder and add the following
|
||||||
|
code to it.
|
||||||
|
|
||||||
|
```luau title="main.luau"
|
||||||
|
print("Hello, pesde!")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Running the script
|
||||||
|
|
||||||
|
Then, we can run the following command to run the script.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde run
|
||||||
|
```
|
||||||
|
|
||||||
|
You should see `Hello, pesde!` printed to the terminal.
|
||||||
|
|
||||||
|
## Install a dependency
|
||||||
|
|
||||||
|
Let's use the `pesde/hello` package instead of printing ourselves.
|
||||||
|
|
||||||
|
Run the following command to add the package to `pesde.toml`.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde add pesde/hello
|
||||||
|
```
|
||||||
|
|
||||||
|
You should see that `pesde.toml` has been updated with the new dependency.
|
||||||
|
|
||||||
|
```diff lang="toml" title="pesde.toml"
|
||||||
|
name = "<username>/hello_pesde"
|
||||||
|
version = "0.1.0"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
[target]
|
||||||
|
environment = "luau"
|
||||||
|
bin = "main.luau"
|
||||||
|
|
||||||
|
[indices]
|
||||||
|
default = "https://github.com/pesde-pkg/index"
|
||||||
|
|
||||||
|
+ [dependencies]
|
||||||
|
+ hello = { name = "pesde/hello", version = "^1.0.0" }
|
||||||
|
```
|
||||||
|
|
||||||
|
Run the following command to install the new dependency.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde install
|
||||||
|
```
|
||||||
|
|
||||||
|
You should see that pesde has created a `luau_packages` folder containing the
|
||||||
|
newly installed package. It has also created a `pesde.lock` file, this file
|
||||||
|
contains the exact versions of the dependencies that were installed so that
|
||||||
|
they can be installed again in the future.
|
||||||
|
|
||||||
|
<FileTree>
|
||||||
|
|
||||||
|
- luau_packages/
|
||||||
|
- hello.luau
|
||||||
|
- ...
|
||||||
|
- main.luau
|
||||||
|
- pesde.lock
|
||||||
|
- pesde.toml
|
||||||
|
|
||||||
|
</FileTree>
|
||||||
|
|
||||||
|
Let's update the `main.luau` file to use the `pesde/hello` package.
|
||||||
|
|
||||||
|
```luau title="main.luau"
|
||||||
|
local hello = require("./luau_packages/hello")
|
||||||
|
|
||||||
|
hello()
|
||||||
|
```
|
||||||
|
|
||||||
|
If we run the script again, we should see something printed to the terminal.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde run
|
||||||
|
# Hello, pesde! (pesde/hello@1.0.0, luau)
|
||||||
|
```
|
180
docs/src/content/docs/reference/cli.mdx
Normal file
|
@ -0,0 +1,180 @@
|
||||||
|
---
|
||||||
|
title: pesde CLI
|
||||||
|
description: Reference for the pesde CLI.
|
||||||
|
---
|
||||||
|
|
||||||
|
import { LinkCard } from "@astrojs/starlight/components"
|
||||||
|
|
||||||
|
The pesde CLI is the primary way to interact with pesde projects. It provides
|
||||||
|
commands for installing dependencies, running scripts, and more.
|
||||||
|
|
||||||
|
## `pesde auth`
|
||||||
|
|
||||||
|
Authentication-related commands.
|
||||||
|
|
||||||
|
- `-i, --index`: The index of which token to manipulate. May be a URL or an alias.
|
||||||
|
Defaults to the default
|
||||||
|
index of the current project or the default index set in the config.
|
||||||
|
|
||||||
|
### `pesde auth login`
|
||||||
|
|
||||||
|
Sets the token for the index.
|
||||||
|
|
||||||
|
- `-t, --token`: The token to set.
|
||||||
|
|
||||||
|
If no token is provided, you will be prompted to authenticate with GitHub. A
|
||||||
|
code will be provided that you can paste into the GitHub authentication prompt.
|
||||||
|
|
||||||
|
### `pesde auth logout`
|
||||||
|
|
||||||
|
Removes the stored token for the index.
|
||||||
|
|
||||||
|
### `pesde auth whoami`
|
||||||
|
|
||||||
|
Prints the username of the currently authenticated user of the index. Only
|
||||||
|
works if the token is a GitHub token.
|
||||||
|
|
||||||
|
### `pesde auth token`
|
||||||
|
|
||||||
|
Prints the token for the index.
|
||||||
|
|
||||||
|
## `pesde config`
|
||||||
|
|
||||||
|
Configuration-related commands.
|
||||||
|
|
||||||
|
### `pesde config default-index`
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde config default-index [INDEX]
|
||||||
|
```
|
||||||
|
|
||||||
|
Configures the default index. If no index is provided, the current default index
|
||||||
|
is printed.
|
||||||
|
|
||||||
|
- `-r, --reset`: Resets the default index.
|
||||||
|
|
||||||
|
The default index is [`pesde-index`](https://github.com/pesde-pkg/index).
|
||||||
|
|
||||||
|
## `pesde init`
|
||||||
|
|
||||||
|
Initializes a new pesde project in the current directory.
|
||||||
|
|
||||||
|
## `pesde run`
|
||||||
|
|
||||||
|
Runs a script from the current project using Lune.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde run [SCRIPT] [ -- <ARGS>...]
|
||||||
|
```
|
||||||
|
|
||||||
|
If no script is provided, it will run the script specified by `target.bin`
|
||||||
|
in `pesde.toml`.
|
||||||
|
|
||||||
|
If a path is provided, it will run the script at that path.
|
||||||
|
|
||||||
|
If a script defined in `[scripts]` is provided, it will run that script.
|
||||||
|
|
||||||
|
If a package name is provided, it will run the script specified by `target.bin`
|
||||||
|
in that package.
|
||||||
|
|
||||||
|
Arguments can be passed to the script by using `--` followed by the arguments.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde run foo -- --arg1 --arg2
|
||||||
|
```
|
||||||
|
|
||||||
|
## `pesde install`
|
||||||
|
|
||||||
|
Installs dependencies for the current project.
|
||||||
|
|
||||||
|
- `--locked`: Whether to error if the lockfile is out of date.
|
||||||
|
- `--prod`: Whether to skip installing dev dependencies.
|
||||||
|
|
||||||
|
## `pesde publish`
|
||||||
|
|
||||||
|
Publishes the current project to the pesde registry.
|
||||||
|
|
||||||
|
- `-d, --dry-run`: Whether to perform a dry run. This will output a
|
||||||
|
tarball containing the package that would be published, but will not actually
|
||||||
|
publish it.
|
||||||
|
- `-y, --yes`: Whether to skip the confirmation prompt.
|
||||||
|
- `-i, --index`: Name of the index to publish to. Defaults to `default`.
|
||||||
|
|
||||||
|
## `pesde self-install`
|
||||||
|
|
||||||
|
Performs the pesde installation process. This should be the first command run
|
||||||
|
after downloading the pesde binary.
|
||||||
|
|
||||||
|
## `pesde self-upgrade`
|
||||||
|
|
||||||
|
Upgrades the pesde binary to the latest version.
|
||||||
|
|
||||||
|
- `--use-cached`: Whether to use the version displayed in the "upgrade available"
|
||||||
|
message instead of checking for the latest version.
|
||||||
|
|
||||||
|
## `pesde patch`
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde patch <PACKAGE>
|
||||||
|
```
|
||||||
|
|
||||||
|
Prepares a patching environment for a package. This will copy the source code of
|
||||||
|
the package to a temporary directory.
|
||||||
|
|
||||||
|
The package specified must be in the format `<name>@<version> <target>`.
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Overrides"
|
||||||
|
description="Learn more about overriding and patching packages."
|
||||||
|
href="/guides/overrides/"
|
||||||
|
/>
|
||||||
|
|
||||||
|
## `pesde patch-commit`
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde patch-commit <PATH>
|
||||||
|
```
|
||||||
|
|
||||||
|
Applies the changes made in the patching environment created by `pesde patch`.
|
||||||
|
|
||||||
|
## `pesde add`
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde add <PACKAGE>
|
||||||
|
```
|
||||||
|
|
||||||
|
Adds a package to the dependencies of the current project.
|
||||||
|
|
||||||
|
- `-i, --index <INDEX>`: The index in which to search for the package.
|
||||||
|
- `-t, --target <TARGET>`: The target environment for the package.
|
||||||
|
- `-a, --alias <ALIAS>`: The alias to use for the package, defaults to the
|
||||||
|
package name.
|
||||||
|
- `-p, --peer`: Adds the package as a peer dependency.
|
||||||
|
- `-d, --dev`: Adds the package as a dev dependency.
|
||||||
|
|
||||||
|
The following formats are supported:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde add pesde/hello
|
||||||
|
pesde add gh#acme/package#main
|
||||||
|
pesde add https://git.acme.local/package.git#aeff6
|
||||||
|
```
|
||||||
|
|
||||||
|
## `pesde update`
|
||||||
|
|
||||||
|
Updates the dependencies of the current project.
|
||||||
|
|
||||||
|
## `pesde x`
|
||||||
|
|
||||||
|
Runs a one-off binary package.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde x <PACKAGE>
|
||||||
|
```
|
||||||
|
|
||||||
|
This is useful for running a binary package without installing it or outside of
|
||||||
|
a pesde project.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
pesde x pesde/hello
|
||||||
|
```
|
432
docs/src/content/docs/reference/manifest.mdx
Normal file
|
@ -0,0 +1,432 @@
|
||||||
|
---
|
||||||
|
title: pesde.toml
|
||||||
|
description: Reference for `pesde.toml`
|
||||||
|
---
|
||||||
|
|
||||||
|
import { LinkCard } from "@astrojs/starlight/components"
|
||||||
|
|
||||||
|
`pesde.toml` is the manifest file for a pesde package. It contains metadata about
|
||||||
|
the package and its dependencies.
|
||||||
|
|
||||||
|
## Top-level fields
|
||||||
|
|
||||||
|
```toml
|
||||||
|
name = "acme/package"
|
||||||
|
version = "1.2.3"
|
||||||
|
description = "A package that does foo and bar"
|
||||||
|
license = "MIT"
|
||||||
|
authors = ["John Doe <john.doe@acme.local> (https://acme.local)"]
|
||||||
|
repository = "https://github.com/acme/package"
|
||||||
|
```
|
||||||
|
|
||||||
|
### `name`
|
||||||
|
|
||||||
|
The name of the package. This is used to identify the package in the registry.
|
||||||
|
|
||||||
|
The name consists of a scope and a package name, separated by a slash (`/`). It
|
||||||
|
may only contain lowercase letters, numbers, and underscores.
|
||||||
|
|
||||||
|
The first one to publish to a given scope gets to own it. If you want multiple
|
||||||
|
people to be able to publish to the same scope, you can send a pull request to
|
||||||
|
the [pesde-index GitHub repository](https://github.com/pesde-pkg/index)
|
||||||
|
and add the GitHub user ID of the other person to the `owners` field of the
|
||||||
|
`scope.toml` file of the given scope. For more information, see
|
||||||
|
[policies](/registry/policies#package-ownership).
|
||||||
|
|
||||||
|
### `version`
|
||||||
|
|
||||||
|
The version of the package. This must be a valid [SemVer](https://semver.org/)
|
||||||
|
version, such as `1.2.3`.
|
||||||
|
|
||||||
|
### `description`
|
||||||
|
|
||||||
|
A short description of the package. This is displayed on the package page in the
|
||||||
|
registry.
|
||||||
|
|
||||||
|
### `license`
|
||||||
|
|
||||||
|
The license of the package. It is recommended to use a
|
||||||
|
[SPDX license identifier](https://spdx.org/licenses/), such as `MIT` or
|
||||||
|
`Apache-2.0`.
|
||||||
|
|
||||||
|
### `authors`
|
||||||
|
|
||||||
|
A list of authors of the package. Each author is a string containing the name of
|
||||||
|
the author, optionally followed by an email address in angle brackets, and a
|
||||||
|
website URL in parentheses. For example:
|
||||||
|
|
||||||
|
```toml
|
||||||
|
authors = ["John Doe <john.doe@acme.local> (https://acme.local)"]
|
||||||
|
```
|
||||||
|
|
||||||
|
### `repository`
|
||||||
|
|
||||||
|
The URL of the repository where the package is hosted. This is displayed on the
|
||||||
|
package page in the registry.
|
||||||
|
|
||||||
|
### `private`
|
||||||
|
|
||||||
|
A boolean indicating whether the package is private. If set to `true`, the
|
||||||
|
package cannot be published to the registry.
|
||||||
|
|
||||||
|
### `includes`
|
||||||
|
|
||||||
|
List of globs to include in the package when publishing. Files and directories
|
||||||
|
not listed here will not be published.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
includes = [
|
||||||
|
"pesde.toml",
|
||||||
|
"README.md",
|
||||||
|
"LICENSE",
|
||||||
|
"init.luau",
|
||||||
|
"docs/**/*.md",
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
### `pesde_version`
|
||||||
|
|
||||||
|
The version of pesde to use within this project. The `pesde` CLI will look at
|
||||||
|
this field and run the correct version of pesde for this project.
|
||||||
|
|
||||||
|
### `workspace_members`
|
||||||
|
|
||||||
|
A list of globs containing the members of this workspace.
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Workspaces"
|
||||||
|
description="Learn more about workspaces in pesde."
|
||||||
|
href="/guides/workspaces/"
|
||||||
|
/>
|
||||||
|
|
||||||
|
## `[target]`
|
||||||
|
|
||||||
|
The `[target]` section contains information about the target platform for the
|
||||||
|
package.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[target]
|
||||||
|
environment = "luau"
|
||||||
|
lib = "init.luau"
|
||||||
|
```
|
||||||
|
|
||||||
|
### `environment`
|
||||||
|
|
||||||
|
The target environment for the package. This can be one of the following:
|
||||||
|
|
||||||
|
- `luau`: Standalone Luau code that can be run using the `luau` CLI.
|
||||||
|
- `lune`: Luau code that requires the Lune runtime.
|
||||||
|
- `roblox`: Luau code that must be run in Roblox.
|
||||||
|
- `roblox_server`: Same as `roblox`, but only for server-side code.
|
||||||
|
|
||||||
|
### `lib`
|
||||||
|
|
||||||
|
**Allowed in:** `luau`, `lune`, `roblox`, `roblox_server`
|
||||||
|
|
||||||
|
The entry point of the library exported by the package. This file is what will
|
||||||
|
be required when the package is loaded using `require`.
|
||||||
|
|
||||||
|
### `bin`
|
||||||
|
|
||||||
|
**Allowed in:** `luau`, `lune`
|
||||||
|
|
||||||
|
The entry point of the binary exported by the package. This file is what will be
|
||||||
|
run when the package is executed as a binary.
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Using Binary Packages"
|
||||||
|
description="Learn more about using binary packages in pesde."
|
||||||
|
href="/guides/binary-packages/"
|
||||||
|
/>
|
||||||
|
|
||||||
|
### `build_files`
|
||||||
|
|
||||||
|
**Allowed in:** `roblox`, `roblox_server`
|
||||||
|
|
||||||
|
A list of files that should be synced to Roblox when the package is installed.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
build_files = [
|
||||||
|
"init.luau",
|
||||||
|
"foo.luau",
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
These files are passed to [`roblox_sync_config_generator`](#roblox_sync_config_generator)
|
||||||
|
when the package is installed in order to generate the necessary configuration.
|
||||||
|
|
||||||
|
### `scripts`
|
||||||
|
|
||||||
|
**Allowed in:** `luau`, `lune`
|
||||||
|
|
||||||
|
A list of scripts that will be linked to the dependant's `.pesde` directory, and
|
||||||
|
copied over to the [scripts](#scripts-1) section when initialising a project with
|
||||||
|
this package as the scripts package.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[target.scripts]
|
||||||
|
roblox_sync_config_generator = "scripts/roblox_sync_config_generator.luau"
|
||||||
|
```
|
||||||
|
|
||||||
|
## `[scripts]`
|
||||||
|
|
||||||
|
The `[scripts]` section contains scripts that can be run using the `pesde run`
|
||||||
|
command. These scripts are run using [Lune](https://lune-org.github.io/docs).
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[scripts]
|
||||||
|
build = "scripts/build.luau"
|
||||||
|
test = "scripts/test.luau"
|
||||||
|
```
|
||||||
|
|
||||||
|
There are also a few special scripts that are run in certain cases by pesde.
|
||||||
|
|
||||||
|
### `roblox_sync_config_generator`
|
||||||
|
|
||||||
|
This is responsible for generating adequate configuration files for Roblox
|
||||||
|
sync tools.
|
||||||
|
|
||||||
|
`process.args` will contain the directory containing the package, and the list
|
||||||
|
of files specified within the [`target.build_files`](#build_files) of the
|
||||||
|
package.
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Roblox"
|
||||||
|
description="Learn more about using pesde in Roblox projects."
|
||||||
|
href="/guides/roblox/"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Example script for Rojo"
|
||||||
|
description="An example script for generating configuration for Rojo."
|
||||||
|
href="https://github.com/pesde-pkg/scripts/blob/master/src/generators/rojo/sync_config.luau"
|
||||||
|
/>
|
||||||
|
|
||||||
|
### `sourcemap_generator`
|
||||||
|
|
||||||
|
This is responsible for generating source maps for packages that are installed.
|
||||||
|
This is required to get proper types support when using
|
||||||
|
[Wally dependencies](/guides/dependencies/#wally-dependencies).
|
||||||
|
|
||||||
|
The script will receive the path to the package directory as the first argument
|
||||||
|
through `process.args`.
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Example script for Rojo"
|
||||||
|
description="An example script for generating configuration for Rojo."
|
||||||
|
href="https://github.com/pesde-pkg/scripts/blob/master/src/generators/rojo/sourcemap.luau"
|
||||||
|
/>
|
||||||
|
|
||||||
|
## `[indices]`
|
||||||
|
|
||||||
|
The `[indices]` section contains a list of pesde indices where packages can be
|
||||||
|
installed from.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[indices]
|
||||||
|
default = "https://github.com/pesde-pkg/index"
|
||||||
|
acme = "https://github.com/acme/pesde-index"
|
||||||
|
```
|
||||||
|
|
||||||
|
These can then be referenced in the [`dependencies`](#dependencies) of the
|
||||||
|
package. The `default` index is used if no index is specified.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[dependencies]
|
||||||
|
foo = { name = "acme/foo", version = "1.2.3", index = "acme" }
|
||||||
|
```
|
||||||
|
|
||||||
|
## `[wally_indices]`
|
||||||
|
|
||||||
|
The `[wally_indices]` section contains a list of Wally indices where packages
|
||||||
|
can be installed from. This is used for
|
||||||
|
[Wally dependencies](/guides/dependencies/#wally-dependencies).
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[wally_indices]
|
||||||
|
default = "https://github.com/UpliftGames/wally-index"
|
||||||
|
acme = "https://github.com/acme/wally-index"
|
||||||
|
```
|
||||||
|
|
||||||
|
These can then be referenced in the [`dependencies`](#dependencies) of the
|
||||||
|
package. The `default` index is used if no index is specified.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[dependencies]
|
||||||
|
foo = { wally = "acme/foo", version = "1.2.3", index = "acme" }
|
||||||
|
```
|
||||||
|
|
||||||
|
## `[overrides]`
|
||||||
|
|
||||||
|
The `[overrides]` section contains a list of overrides for dependencies. This
|
||||||
|
allows you to replace certain dependencies with different versions or even
|
||||||
|
different packages.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[overrides]
|
||||||
|
"bar>baz" = { name = "acme/baz", version = "1.0.0" }
|
||||||
|
"foo>bar,baz>bar" = { name = "acme/bar", version = "2.0.0" }
|
||||||
|
```
|
||||||
|
|
||||||
|
The above example will replace the `baz` dependency of the `bar` package with
|
||||||
|
version `1.0.0`, and the `bar` and `baz` dependencies of the `foo` package with
|
||||||
|
version `2.0.0`.
|
||||||
|
|
||||||
|
Each key in the overrides table is a comma-separated list of package paths. The
|
||||||
|
path is a list of package names separated by `>`. For example, `foo>bar>baz`
|
||||||
|
refers to the `baz` dependency of the `bar` package, which is a dependency of
|
||||||
|
the `foo` package.
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Overrides"
|
||||||
|
description="Learn more about overriding and patching packages."
|
||||||
|
href="/guides/overrides/"
|
||||||
|
/>
|
||||||
|
|
||||||
|
## `[patches]`
|
||||||
|
|
||||||
|
The `[patches]` section contains a list of patches for dependencies. This allows
|
||||||
|
you to modify the source code of dependencies.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[patches]
|
||||||
|
"acme/foo" = { "1.0.0 luau" = "patches/acme+foo-1.0.0+luau.patch" }
|
||||||
|
```
|
||||||
|
|
||||||
|
The above example will patch version `1.0.0` with the `luau` target of the
|
||||||
|
`acme/foo` package using the `patches/acme+foo-1.0.0+luau.patch` file.
|
||||||
|
|
||||||
|
Each key in the patches table is the package name, and the value is a table
|
||||||
|
where the keys are the version and target, and the value is the path to the
|
||||||
|
patch.
|
||||||
|
|
||||||
|
The patches can be generated using the `pesde patch` command.
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Overrides"
|
||||||
|
description="Learn more about overriding and patching packages."
|
||||||
|
href="/guides/overrides/"
|
||||||
|
/>
|
||||||
|
|
||||||
|
## `[place]`
|
||||||
|
|
||||||
|
This is used in Roblox projects to specify where packages are located in the
|
||||||
|
Roblox datamodel.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[place]
|
||||||
|
shared = "game.ReplicatedStorage.Packages"
|
||||||
|
server = "game.ServerScriptService.Packages"
|
||||||
|
```
|
||||||
|
|
||||||
|
## `[dependencies]`
|
||||||
|
|
||||||
|
The `[dependencies]` section contains a list of dependencies for the package.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[dependencies]
|
||||||
|
foo = { name = "acme/foo", version = "1.2.3" }
|
||||||
|
bar = { wally = "acme/bar", version = "2.3.4" }
|
||||||
|
baz = { repo = "acme/baz", rev = "main" }
|
||||||
|
```
|
||||||
|
|
||||||
|
Each key in the dependencies table is the name of the dependency, and the value
|
||||||
|
is a dependency specifier.
|
||||||
|
|
||||||
|
There are several types of dependency specifiers.
|
||||||
|
|
||||||
|
### pesde
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[dependencies]
|
||||||
|
foo = { name = "acme/foo", version = "1.2.3", index = "acme", target = "lune" }
|
||||||
|
```
|
||||||
|
|
||||||
|
**pesde dependencies** contain the following fields:
|
||||||
|
|
||||||
|
- `name`: The name of the package.
|
||||||
|
- `version`: The version of the package.
|
||||||
|
- `index`: The [pesde index](#indices) to install the package from. If not
|
||||||
|
specified, the `default` index is used.
|
||||||
|
- `target`: The target platform for the package. If not specified, the target
|
||||||
|
platform of the current package is used.
|
||||||
|
|
||||||
|
### Wally
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[dependencies]
|
||||||
|
foo = { wally = "acme/foo", version = "1.2.3", index = "acme" }
|
||||||
|
```
|
||||||
|
|
||||||
|
**Wally dependencies** contain the following fields:
|
||||||
|
|
||||||
|
- `wally`: The name of the package.
|
||||||
|
- `version`: The version of the package.
|
||||||
|
- `index`: The [Wally index](#wally_indices) to install the package from. If not
|
||||||
|
specified, the `default` index is used.
|
||||||
|
|
||||||
|
### Git
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[dependencies]
|
||||||
|
foo = { repo = "acme/packages", rev = "aeff6", path = "foo" }
|
||||||
|
```
|
||||||
|
|
||||||
|
**Git dependencies** contain the following fields:
|
||||||
|
|
||||||
|
- `repo`: The URL of the Git repository.
|
||||||
|
This can either be `<owner>/<name>` for a GitHub repository, or a full URL.
|
||||||
|
- `rev`: The Git revision to install. This can be a tag or commit hash.
|
||||||
|
- `path`: The path within the repository to install. If not specified, the root
|
||||||
|
of the repository is used.
|
||||||
|
|
||||||
|
### Workspace
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[dependencies]
|
||||||
|
foo = { workspace = "acme/foo", version = "^" }
|
||||||
|
```
|
||||||
|
|
||||||
|
**Workspace dependencies** contain the following fields:
|
||||||
|
|
||||||
|
- `workspace`: The name of the package in the workspace.
|
||||||
|
- `version`: The version requirement for the package. This can be `^`, `*`, `=`,
|
||||||
|
`~`, or a specific version requirement such as `^1.2.3`.
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Workspaces"
|
||||||
|
description="Learn more about workspace dependencies in pesde."
|
||||||
|
href="/guides/workspaces/#workspace-dependencies"
|
||||||
|
/>
|
||||||
|
|
||||||
|
## `[peer_dependencies]`
|
||||||
|
|
||||||
|
The `[peer_dependencies]` section contains a list of peer dependencies for the
|
||||||
|
package. These are dependencies that are required by the package, but are not
|
||||||
|
installed automatically. Instead, they must be installed by the user of the
|
||||||
|
package.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[peer_dependencies]
|
||||||
|
foo = { name = "acme/foo", version = "1.2.3" }
|
||||||
|
```
|
||||||
|
|
||||||
|
## `[dev_dependencies]`
|
||||||
|
|
||||||
|
The `[dev_dependencies]` section contains a list of development dependencies for
|
||||||
|
the package. These are dependencies that are only required during development,
|
||||||
|
such as testing libraries or build tools. They are not installed when the
|
||||||
|
package is used by another package.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[dev_dependencies]
|
||||||
|
foo = { name = "acme/foo", version = "1.2.3" }
|
||||||
|
```
|
||||||
|
|
||||||
|
<br />
|
||||||
|
|
||||||
|
<LinkCard
|
||||||
|
title="Specifying Dependencies"
|
||||||
|
description="Learn more about specifying dependencies in pesde."
|
||||||
|
href="/guides/dependencies/"
|
||||||
|
/>
|
96
docs/src/content/docs/registry/policies.md
Normal file
|
@ -0,0 +1,96 @@
|
||||||
|
---
|
||||||
|
title: Policies
|
||||||
|
description: Policies for the pesde registry
|
||||||
|
---
|
||||||
|
|
||||||
|
The following policies apply to the [official public pesde registry](https://registry.pesde.daimond113.com)
|
||||||
|
and its related services, such as the index repository or websites.
|
||||||
|
They may not apply to other registries. By using the pesde registry, you agree
|
||||||
|
to these policies.
|
||||||
|
|
||||||
|
If anything is unclear, please [contact us](#contact-us), and we will be happy
|
||||||
|
to help.
|
||||||
|
|
||||||
|
## Contact Us
|
||||||
|
|
||||||
|
You can contact us at [pesde@daimond113.com](mailto:pesde@daimond113.com). In
|
||||||
|
case of a security issue, please prefix the subject with `[SECURITY]`.
|
||||||
|
|
||||||
|
## Permitted content
|
||||||
|
|
||||||
|
The pesde registry is a place for Luau-related packages. This includes:
|
||||||
|
|
||||||
|
- Libraries
|
||||||
|
- Frameworks
|
||||||
|
- Tools
|
||||||
|
|
||||||
|
The following content is forbidden:
|
||||||
|
|
||||||
|
- Malicious, vulnerable code
|
||||||
|
- Illegal, harmful content
|
||||||
|
- Miscellaneous files (doesn't include configuration files, documentation, etc.)
|
||||||
|
|
||||||
|
pesde is not responsible for the content of packages, the scope owner is. It
|
||||||
|
is the responsibility of the scope owner to ensure that the content of their
|
||||||
|
packages is compliant with the permitted content policy.
|
||||||
|
|
||||||
|
If you believe a package is breaking these requirements, please [contact us](#contact-us).
|
||||||
|
|
||||||
|
## Package removal
|
||||||
|
|
||||||
|
pesde does not support removing packages for reasons such as abandonment. A
|
||||||
|
package may only be removed for the following reasons:
|
||||||
|
|
||||||
|
- The package is breaking the permitted content policy
|
||||||
|
- The package contains security vulnerabilities
|
||||||
|
- The package must be removed for legal reasons (e.g. DMCA takedown)
|
||||||
|
|
||||||
|
In case a secret has been published to the registry, it must be invalidated.
|
||||||
|
If you believe a package should be removed, please [contact us](#contact-us).
|
||||||
|
We will review your request and take action if necessary.
|
||||||
|
|
||||||
|
If we find that a package is breaking the permitted content policy, we will
|
||||||
|
exercise our right to remove it from the registry without notice.
|
||||||
|
|
||||||
|
pesde reserves the right to remove any package from the registry at any time for
|
||||||
|
any or no reason, without notice.
|
||||||
|
|
||||||
|
## Package ownership
|
||||||
|
|
||||||
|
Packages are owned by scopes. Scope ownership is determined by the first person
|
||||||
|
to publish a package to the scope. The owner of the scope may send a pull request
|
||||||
|
to the index repository adding team members' user IDs to the scope's `scope.toml`
|
||||||
|
file to give them access to the scope, however at least one package must be
|
||||||
|
published to the scope before this can be done. The owner may also remove team
|
||||||
|
members from the scope.
|
||||||
|
|
||||||
|
A scope's true owner's ID must appear first in the `owners` field of the scope's
|
||||||
|
`scope.toml` file. Ownership may be transferred by the current owner sending a
|
||||||
|
pull request to the index repository, and the new owner confirming the transfer.
|
||||||
|
|
||||||
|
Only the owner may add or remove team members from the scope.
|
||||||
|
|
||||||
|
pesde reserves the right to override scope ownership in the case of a dispute,
|
||||||
|
such as if the original owner is unresponsive or multiple parties claim ownership.
|
||||||
|
|
||||||
|
## Scope squatting
|
||||||
|
|
||||||
|
Scope squatting is the act of creating a scope with the intent of preventing
|
||||||
|
others from using it, without any intention of using it yourself. This is
|
||||||
|
forbidden and can result in the removal (release) of the scope and its packages
|
||||||
|
from the registry without notice.
|
||||||
|
|
||||||
|
If you believe a scope is being squatted, please [contact us](#contact-us).
|
||||||
|
We will review your request and take action if necessary.
|
||||||
|
|
||||||
|
## API Usage
|
||||||
|
|
||||||
|
The pesde registry has an API for querying, downloading, and publishing packages.
|
||||||
|
Only non-malicious use is permitted. Malicious uses include:
|
||||||
|
|
||||||
|
- **Service Degradation**: this includes sending an excessive amount of requests
|
||||||
|
to the registry in order to degrade the service
|
||||||
|
- **Exploitation**: this includes trying to break the security of the registry
|
||||||
|
in order to gain unauthorized access
|
||||||
|
- **Harmful content**: this includes publishing harmful (non-law compliant,
|
||||||
|
purposefully insecure) content
|
2
docs/src/env.d.ts
vendored
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
/// <reference path="../.astro/types.d.ts" />
|
||||||
|
/// <reference types="astro/client" />
|
11
docs/src/tailwind.css
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
@tailwind base;
|
||||||
|
@tailwind components;
|
||||||
|
@tailwind utilities;
|
||||||
|
|
||||||
|
:root[data-theme="light"] {
|
||||||
|
--sl-color-bg: rgb(255 245 230);
|
||||||
|
}
|
||||||
|
|
||||||
|
:root[data-theme="light"] .sidebar-pane {
|
||||||
|
background-color: var(--sl-color-bg);
|
||||||
|
}
|
36
docs/tailwind.config.ts
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
import starlightPlugin from "@astrojs/starlight-tailwind"
|
||||||
|
import type { Config } from "tailwindcss"
|
||||||
|
import defaultTheme from "tailwindcss/defaultTheme"
|
||||||
|
|
||||||
|
export default {
|
||||||
|
content: ["./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}"],
|
||||||
|
|
||||||
|
theme: {
|
||||||
|
extend: {
|
||||||
|
fontFamily: {
|
||||||
|
sans: ["Nunito Sans Variable", ...defaultTheme.fontFamily.sans],
|
||||||
|
},
|
||||||
|
colors: {
|
||||||
|
accent: {
|
||||||
|
200: "rgb(241 157 30)",
|
||||||
|
600: "rgb(120 70 10)",
|
||||||
|
900: "rgb(24 16 8)",
|
||||||
|
950: "rgb(10 7 4)",
|
||||||
|
},
|
||||||
|
gray: {
|
||||||
|
100: "rgb(245 230 210)",
|
||||||
|
200: "rgb(228 212 192)",
|
||||||
|
300: "rgb(198 167 140)",
|
||||||
|
400: "rgb(142 128 112)",
|
||||||
|
500: "rgb(84 70 50)",
|
||||||
|
600: "rgb(65 50 41)",
|
||||||
|
700: "rgb(50 42 35)",
|
||||||
|
800: "rgb(28 22 17)",
|
||||||
|
900: "rgb(10 7 4)",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
plugins: [starlightPlugin()],
|
||||||
|
} as Config
|
3
docs/tsconfig.json
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
{
|
||||||
|
"extends": "astro/tsconfigs/strict"
|
||||||
|
}
|
5
fly.toml
|
@ -10,13 +10,12 @@ ADDRESS = '0.0.0.0'
|
||||||
PORT = '8080'
|
PORT = '8080'
|
||||||
COMMITTER_GIT_NAME = 'pesde index updater'
|
COMMITTER_GIT_NAME = 'pesde index updater'
|
||||||
COMMITTER_GIT_EMAIL = 'pesde@daimond113.com'
|
COMMITTER_GIT_EMAIL = 'pesde@daimond113.com'
|
||||||
INDEX_REPO_URL = 'https://github.com/daimond113/pesde-index'
|
INDEX_REPO_URL = 'https://github.com/pesde-pkg/index'
|
||||||
GITHUB_AUTH=1
|
|
||||||
|
|
||||||
[http_service]
|
[http_service]
|
||||||
internal_port = 8080
|
internal_port = 8080
|
||||||
force_https = true
|
force_https = true
|
||||||
auto_stop_machines = "suspend"
|
auto_stop_machines = "stop"
|
||||||
auto_start_machines = true
|
auto_start_machines = true
|
||||||
min_machines_running = 0
|
min_machines_running = 0
|
||||||
processes = ['app']
|
processes = ['app']
|
||||||
|
|
|
@ -1,23 +1,29 @@
|
||||||
INDEX_REPO_URL = # url of the index repository
|
INDEX_REPO_URL = # url of the index repository
|
||||||
|
|
||||||
GITHUB_USERNAME= # username of github account with push access to the index repository
|
GIT_USERNAME= # username of a Git account with push access to the index repository
|
||||||
GITHUB_PAT= # personal access token of github account with push access to the index repository
|
GIT_PASSWORD= # password of the account (PAT for GitHub)
|
||||||
|
|
||||||
COMMITTER_GIT_NAME= # name of the committer used for index updates
|
COMMITTER_GIT_NAME= # name of the committer used for index updates
|
||||||
COMMITTER_GIT_EMAIL= # email of the committer used for index updates
|
COMMITTER_GIT_EMAIL= # email of the committer used for index updates
|
||||||
|
|
||||||
|
DATA_DIR= # directory where miscellaneous data is stored
|
||||||
|
|
||||||
# AUTHENTICATION CONFIGURATION
|
# AUTHENTICATION CONFIGURATION
|
||||||
# Set the variables of the authentication you want to use in order to enable it
|
# Set the variables of the authentication you want to use in order to enable it
|
||||||
|
|
||||||
|
READ_NEEDS_AUTH= # set to any value to require authentication for read requests
|
||||||
|
|
||||||
# Single Token
|
# Single Token
|
||||||
ACCESS_TOKEN= # a single token that is used to authenticate all publish requests
|
ACCESS_TOKEN= # a single token that is used to authenticate all publish requests
|
||||||
|
|
||||||
# Read/Write Tokens
|
# Read/Write Tokens
|
||||||
|
# READ_NEEDS_AUTH isn't required for this
|
||||||
|
|
||||||
READ_ACCESS_TOKEN= # a token that is used to authenticate read requests
|
READ_ACCESS_TOKEN= # a token that is used to authenticate read requests
|
||||||
WRITE_ACCESS_TOKEN= # a token that is used to authenticate write requests
|
WRITE_ACCESS_TOKEN= # a token that is used to authenticate write requests
|
||||||
|
|
||||||
# GitHub
|
# GitHub
|
||||||
GITHUB_AUTH= # set to any value to enable GitHub authentication
|
GITHUB_CLIENT_SECRET= # client secret of the GitHub OAuth app configured in the index's `config.toml`
|
||||||
|
|
||||||
# If none of the above is set, no authentication is required, even for write requests
|
# If none of the above is set, no authentication is required, even for write requests
|
||||||
|
|
||||||
|
@ -34,4 +40,4 @@ S3_SECRET_KEY= # secret key of the S3 bucket
|
||||||
# FS
|
# FS
|
||||||
FS_STORAGE_ROOT= # root directory of the filesystem storage
|
FS_STORAGE_ROOT= # root directory of the filesystem storage
|
||||||
|
|
||||||
SENTRY_URL= # optional url of sentry error tracking
|
SENTRY_DSN= # optional DSN of Sentry error tracking
|
22
registry/CHANGELOG.md
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [0.1.2]
|
||||||
|
### Changed
|
||||||
|
- Update to pesde lib API changes by @daimond113
|
||||||
|
|
||||||
|
## [0.1.1] - 2024-12-19
|
||||||
|
### Changed
|
||||||
|
- Switch to traccing for logging by @daimond113
|
||||||
|
|
||||||
|
## [0.1.0] - 2024-12-14
|
||||||
|
### Added
|
||||||
|
- Rewrite registry for pesde v0.5.0 by @daimond113
|
||||||
|
|
||||||
|
[0.1.2]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
|
||||||
|
[0.1.1]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
|
||||||
|
[0.1.0]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0%2Bregistry.0.1.0
|
|
@ -1,52 +1,50 @@
|
||||||
[package]
|
[package]
|
||||||
name = "pesde-registry"
|
name = "pesde-registry"
|
||||||
version = "0.7.0"
|
version = "0.1.2"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
repository = "https://github.com/daimond113/pesde-index"
|
repository = "https://github.com/pesde-pkg/index"
|
||||||
publish = false
|
publish = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix-web = "4.9.0"
|
actix-web = "4.9.0"
|
||||||
actix-multipart = "0.7.2"
|
|
||||||
actix-cors = "0.7.0"
|
actix-cors = "0.7.0"
|
||||||
actix-governor = "0.6.0"
|
actix-governor = "0.8.0"
|
||||||
dotenvy = "0.15.7"
|
dotenvy = "0.15.7"
|
||||||
thiserror = "1.0.64"
|
thiserror = "2.0.7"
|
||||||
tantivy = "0.22.0"
|
tantivy = "0.22.0"
|
||||||
semver = "1.0.23"
|
semver = "1.0.24"
|
||||||
chrono = { version = "0.4.38", features = ["serde"] }
|
chrono = { version = "0.4.39", features = ["serde"] }
|
||||||
url = "2.5.2"
|
futures = "0.3.31"
|
||||||
futures = "0.3.30"
|
tokio = "1.42.0"
|
||||||
tempfile = "3.13.0"
|
tempfile = "3.14.0"
|
||||||
|
fs-err = { version = "3.0.0", features = ["tokio"] }
|
||||||
|
async-stream = "0.3.6"
|
||||||
|
|
||||||
git2 = "0.19.0"
|
git2 = "0.19.0"
|
||||||
gix = { version = "0.66.0", default-features = false, features = [
|
gix = { version = "0.68.0", default-features = false, features = [
|
||||||
"blocking-http-transport-reqwest-rust-tls",
|
"blocking-http-transport-reqwest-rust-tls",
|
||||||
"credentials",
|
"credentials",
|
||||||
] }
|
] }
|
||||||
|
|
||||||
serde = "1.0.210"
|
serde = "1.0.216"
|
||||||
serde_json = "1.0.128"
|
serde_json = "1.0.133"
|
||||||
serde_yaml = "0.9.34"
|
serde_yaml = "0.9.34"
|
||||||
toml = "0.8.19"
|
toml = "0.8.19"
|
||||||
convert_case = "0.6.0"
|
convert_case = "0.6.0"
|
||||||
sha2 = "0.10.8"
|
sha2 = "0.10.8"
|
||||||
|
|
||||||
rusty-s3 = "0.5.0"
|
rusty-s3 = "0.5.0"
|
||||||
reqwest = { version = "0.12.7", features = ["json", "rustls-tls"] }
|
reqwest = { version = "0.12.9", features = ["json", "rustls-tls"] }
|
||||||
constant_time_eq = "0.3.1"
|
constant_time_eq = "0.3.1"
|
||||||
|
|
||||||
tar = "0.4.42"
|
tokio-tar = "0.3.1"
|
||||||
flate2 = "1.0.34"
|
async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
|
||||||
|
|
||||||
log = "0.4.22"
|
tracing = { version = "0.1.41", features = ["attributes"] }
|
||||||
pretty_env_logger = "0.5.0"
|
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
|
||||||
|
tracing-actix-web = "0.7.15"
|
||||||
|
|
||||||
sentry = "0.34.0"
|
sentry = { version = "0.35.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "tracing"] }
|
||||||
sentry-log = "0.34.0"
|
sentry-actix = "0.35.0"
|
||||||
sentry-actix = "0.34.0"
|
|
||||||
|
|
||||||
pesde = { path = "..", features = [
|
pesde = { path = "..", features = ["wally-compat"] }
|
||||||
"wally-compat",
|
|
||||||
"git2",
|
|
||||||
] }
|
|
||||||
|
|
|
@ -1,39 +1,67 @@
|
||||||
use crate::auth::{get_token_from_req, AuthImpl, UserId};
|
use crate::{
|
||||||
|
auth::{get_token_from_req, AuthImpl, UserId},
|
||||||
|
error::ReqwestErrorExt,
|
||||||
|
};
|
||||||
use actix_web::{dev::ServiceRequest, Error as ActixError};
|
use actix_web::{dev::ServiceRequest, Error as ActixError};
|
||||||
use serde::Deserialize;
|
use reqwest::StatusCode;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
use std::fmt::Display;
|
use std::fmt::Display;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct GitHubAuth {
|
pub struct GitHubAuth {
|
||||||
pub reqwest_client: reqwest::Client,
|
pub reqwest_client: reqwest::Client,
|
||||||
|
pub client_id: String,
|
||||||
|
pub client_secret: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
struct TokenRequestBody {
|
||||||
|
access_token: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AuthImpl for GitHubAuth {
|
impl AuthImpl for GitHubAuth {
|
||||||
async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
|
async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
|
||||||
let token = match get_token_from_req(req, true) {
|
let token = match get_token_from_req(req) {
|
||||||
Some(token) => token,
|
Some(token) => token,
|
||||||
None => return Ok(None),
|
None => return Ok(None),
|
||||||
};
|
};
|
||||||
|
|
||||||
let response = match self
|
let response = match self
|
||||||
.reqwest_client
|
.reqwest_client
|
||||||
.get("https://api.github.com/user")
|
.post(format!(
|
||||||
.header(reqwest::header::AUTHORIZATION, token)
|
"https://api.github.com/applications/{}/token",
|
||||||
|
self.client_id
|
||||||
|
))
|
||||||
|
.basic_auth(&self.client_id, Some(&self.client_secret))
|
||||||
|
.json(&TokenRequestBody {
|
||||||
|
access_token: token,
|
||||||
|
})
|
||||||
.send()
|
.send()
|
||||||
.await
|
.await
|
||||||
.and_then(|res| res.error_for_status())
|
|
||||||
{
|
{
|
||||||
Ok(response) => response,
|
Ok(response) => match response.error_for_status_ref() {
|
||||||
|
Ok(_) => response,
|
||||||
|
Err(e) if e.status().is_some_and(|s| s == StatusCode::NOT_FOUND) => {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
tracing::error!(
|
||||||
|
"failed to get user: {}",
|
||||||
|
response.into_error().await.unwrap_err()
|
||||||
|
);
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
},
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
log::error!("failed to get user: {e}");
|
tracing::error!("failed to get user: {e}");
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let user_id = match response.json::<UserResponse>().await {
|
let user_id = match response.json::<UserResponse>().await {
|
||||||
Ok(user) => user.id,
|
Ok(resp) => resp.user.id,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
log::error!("failed to get user: {e}");
|
tracing::error!("failed to get user: {e}");
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -49,6 +77,11 @@ impl Display for GitHubAuth {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
struct UserResponse {
|
struct User {
|
||||||
id: u64,
|
id: u64,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
struct UserResponse {
|
||||||
|
user: User,
|
||||||
|
}
|
||||||
|
|
|
@ -13,6 +13,8 @@ use actix_web::{
|
||||||
middleware::Next,
|
middleware::Next,
|
||||||
web, HttpMessage, HttpResponse,
|
web, HttpMessage, HttpResponse,
|
||||||
};
|
};
|
||||||
|
use pesde::source::pesde::IndexConfig;
|
||||||
|
use sentry::add_breadcrumb;
|
||||||
use sha2::{Digest, Sha256};
|
use sha2::{Digest, Sha256};
|
||||||
use std::fmt::Display;
|
use std::fmt::Display;
|
||||||
|
|
||||||
|
@ -55,7 +57,7 @@ pub trait AuthImpl: Display {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn read_needs_auth(&self) -> bool {
|
fn read_needs_auth(&self) -> bool {
|
||||||
false
|
benv!("READ_NEEDS_AUTH").is_ok()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -113,6 +115,13 @@ pub async fn write_mw(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
add_breadcrumb(sentry::Breadcrumb {
|
||||||
|
category: Some("auth".into()),
|
||||||
|
message: Some(format!("write request authorized as {}", user_id.0)),
|
||||||
|
level: sentry::Level::Info,
|
||||||
|
..Default::default()
|
||||||
|
});
|
||||||
|
|
||||||
req.extensions_mut().insert(user_id);
|
req.extensions_mut().insert(user_id);
|
||||||
|
|
||||||
next.call(req).await.map(|res| res.map_into_left_body())
|
next.call(req).await.map(|res| res.map_into_left_body())
|
||||||
|
@ -133,6 +142,13 @@ pub async fn read_mw(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
add_breadcrumb(sentry::Breadcrumb {
|
||||||
|
category: Some("auth".into()),
|
||||||
|
message: Some(format!("read request authorized as {}", user_id.0)),
|
||||||
|
level: sentry::Level::Info,
|
||||||
|
..Default::default()
|
||||||
|
});
|
||||||
|
|
||||||
req.extensions_mut().insert(Some(user_id));
|
req.extensions_mut().insert(Some(user_id));
|
||||||
} else {
|
} else {
|
||||||
req.extensions_mut().insert(None::<UserId>);
|
req.extensions_mut().insert(None::<UserId>);
|
||||||
|
@ -141,14 +157,19 @@ pub async fn read_mw(
|
||||||
next.call(req).await.map(|res| res.map_into_left_body())
|
next.call(req).await.map(|res| res.map_into_left_body())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_auth_from_env() -> Auth {
|
pub fn get_auth_from_env(config: &IndexConfig) -> Auth {
|
||||||
if let Ok(token) = benv!("ACCESS_TOKEN") {
|
if let Ok(token) = benv!("ACCESS_TOKEN") {
|
||||||
Auth::Token(token::TokenAuth {
|
Auth::Token(token::TokenAuth {
|
||||||
token: *Sha256::digest(token.as_bytes()).as_ref(),
|
token: *Sha256::digest(token.as_bytes()).as_ref(),
|
||||||
})
|
})
|
||||||
} else if benv!("GITHUB_AUTH").is_ok() {
|
} else if let Ok(client_secret) = benv!("GITHUB_CLIENT_SECRET") {
|
||||||
Auth::GitHub(github::GitHubAuth {
|
Auth::GitHub(github::GitHubAuth {
|
||||||
reqwest_client: make_reqwest(),
|
reqwest_client: make_reqwest(),
|
||||||
|
client_id: config
|
||||||
|
.github_oauth_client_id
|
||||||
|
.clone()
|
||||||
|
.expect("index isn't configured for GitHub"),
|
||||||
|
client_secret,
|
||||||
})
|
})
|
||||||
} else if let Ok((r, w)) =
|
} else if let Ok((r, w)) =
|
||||||
benv!("READ_ACCESS_TOKEN").and_then(|r| benv!("WRITE_ACCESS_TOKEN").map(|w| (r, w)))
|
benv!("READ_ACCESS_TOKEN").and_then(|r| benv!("WRITE_ACCESS_TOKEN").map(|w| (r, w)))
|
||||||
|
@ -162,23 +183,13 @@ pub fn get_auth_from_env() -> Auth {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_token_from_req(req: &ServiceRequest, bearer: bool) -> Option<String> {
|
pub fn get_token_from_req(req: &ServiceRequest) -> Option<String> {
|
||||||
let token = match req
|
let token = req
|
||||||
.headers()
|
.headers()
|
||||||
.get(AUTHORIZATION)
|
.get(AUTHORIZATION)
|
||||||
.and_then(|token| token.to_str().ok())
|
.and_then(|token| token.to_str().ok())?;
|
||||||
{
|
|
||||||
Some(token) => token,
|
|
||||||
None => return None,
|
|
||||||
};
|
|
||||||
|
|
||||||
let token = if bearer {
|
let token = if token.to_lowercase().starts_with("bearer ") {
|
||||||
if token.to_lowercase().starts_with("bearer ") {
|
|
||||||
token.to_string()
|
|
||||||
} else {
|
|
||||||
format!("Bearer {token}")
|
|
||||||
}
|
|
||||||
} else if token.to_lowercase().starts_with("bearer ") {
|
|
||||||
token[7..].to_string()
|
token[7..].to_string()
|
||||||
} else {
|
} else {
|
||||||
token.to_string()
|
token.to_string()
|
||||||
|
|
|
@ -12,7 +12,7 @@ pub struct RwTokenAuth {
|
||||||
|
|
||||||
impl AuthImpl for RwTokenAuth {
|
impl AuthImpl for RwTokenAuth {
|
||||||
async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
|
async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
|
||||||
let token = match get_token_from_req(req, false) {
|
let token = match get_token_from_req(req) {
|
||||||
Some(token) => token,
|
Some(token) => token,
|
||||||
None => return Ok(None),
|
None => return Ok(None),
|
||||||
};
|
};
|
||||||
|
@ -27,7 +27,7 @@ impl AuthImpl for RwTokenAuth {
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn for_read_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
|
async fn for_read_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
|
||||||
let token = match get_token_from_req(req, false) {
|
let token = match get_token_from_req(req) {
|
||||||
Some(token) => token,
|
Some(token) => token,
|
||||||
None => return Ok(None),
|
None => return Ok(None),
|
||||||
};
|
};
|
||||||
|
|
|
@ -12,7 +12,7 @@ pub struct TokenAuth {
|
||||||
|
|
||||||
impl AuthImpl for TokenAuth {
|
impl AuthImpl for TokenAuth {
|
||||||
async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
|
async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
|
||||||
let token = match get_token_from_req(req, false) {
|
let token = match get_token_from_req(req) {
|
||||||
Some(token) => token,
|
Some(token) => token,
|
||||||
None => return Ok(None),
|
None => return Ok(None),
|
||||||
};
|
};
|
||||||
|
|
|
@ -7,7 +7,7 @@ use pesde::{
|
||||||
manifest::target::TargetKind,
|
manifest::target::TargetKind,
|
||||||
names::PackageName,
|
names::PackageName,
|
||||||
source::{
|
source::{
|
||||||
git_index::GitBasedSource,
|
git_index::{read_file, root_tree, GitBasedSource},
|
||||||
pesde::{DocEntryKind, IndexFile},
|
pesde::{DocEntryKind, IndexFile},
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
@ -71,10 +71,12 @@ pub async fn get_package_version(
|
||||||
|
|
||||||
let (scope, name_part) = name.as_str();
|
let (scope, name_part) = name.as_str();
|
||||||
|
|
||||||
let entries: IndexFile = {
|
let file: IndexFile = {
|
||||||
let source = app_state.source.lock().unwrap();
|
let source = app_state.source.lock().await;
|
||||||
|
let repo = gix::open(source.path(&app_state.project))?;
|
||||||
|
let tree = root_tree(&repo)?;
|
||||||
|
|
||||||
match source.read_file([scope, name_part], &app_state.project, None)? {
|
match read_file(&tree, [scope, name_part])? {
|
||||||
Some(versions) => toml::de::from_str(&versions)?,
|
Some(versions) => toml::de::from_str(&versions)?,
|
||||||
None => return Ok(HttpResponse::NotFound().finish()),
|
None => return Ok(HttpResponse::NotFound().finish()),
|
||||||
}
|
}
|
||||||
|
@ -82,14 +84,15 @@ pub async fn get_package_version(
|
||||||
|
|
||||||
let Some((v_id, entry, targets)) = ({
|
let Some((v_id, entry, targets)) = ({
|
||||||
let version = match version {
|
let version = match version {
|
||||||
VersionRequest::Latest => match entries.keys().map(|k| k.version()).max() {
|
VersionRequest::Latest => match file.entries.keys().map(|k| k.version()).max() {
|
||||||
Some(latest) => latest.clone(),
|
Some(latest) => latest.clone(),
|
||||||
None => return Ok(HttpResponse::NotFound().finish()),
|
None => return Ok(HttpResponse::NotFound().finish()),
|
||||||
},
|
},
|
||||||
VersionRequest::Specific(version) => version,
|
VersionRequest::Specific(version) => version,
|
||||||
};
|
};
|
||||||
|
|
||||||
let versions = entries
|
let versions = file
|
||||||
|
.entries
|
||||||
.iter()
|
.iter()
|
||||||
.filter(|(v_id, _)| *v_id.version() == version);
|
.filter(|(v_id, _)| *v_id.version() == version);
|
||||||
|
|
||||||
|
@ -162,6 +165,7 @@ pub async fn get_package_version(
|
||||||
|
|
||||||
let mut value = serde_json::to_value(response)?;
|
let mut value = serde_json::to_value(response)?;
|
||||||
value["docs"] = serde_json::to_value(entry.docs.clone())?;
|
value["docs"] = serde_json::to_value(entry.docs.clone())?;
|
||||||
|
value["dependencies"] = serde_json::to_value(entry.dependencies.clone())?;
|
||||||
|
|
||||||
Ok(HttpResponse::Ok().json(value))
|
Ok(HttpResponse::Ok().json(value))
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,13 +2,15 @@ use std::collections::{BTreeMap, BTreeSet};
|
||||||
|
|
||||||
use actix_web::{web, HttpResponse, Responder};
|
use actix_web::{web, HttpResponse, Responder};
|
||||||
|
|
||||||
|
use crate::{error::Error, package::PackageResponse, AppState};
|
||||||
use pesde::{
|
use pesde::{
|
||||||
names::PackageName,
|
names::PackageName,
|
||||||
source::{git_index::GitBasedSource, pesde::IndexFile},
|
source::{
|
||||||
|
git_index::{read_file, root_tree, GitBasedSource},
|
||||||
|
pesde::IndexFile,
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{error::Error, package::PackageResponse, AppState};
|
|
||||||
|
|
||||||
pub async fn get_package_versions(
|
pub async fn get_package_versions(
|
||||||
app_state: web::Data<AppState>,
|
app_state: web::Data<AppState>,
|
||||||
path: web::Path<PackageName>,
|
path: web::Path<PackageName>,
|
||||||
|
@ -17,16 +19,20 @@ pub async fn get_package_versions(
|
||||||
|
|
||||||
let (scope, name_part) = name.as_str();
|
let (scope, name_part) = name.as_str();
|
||||||
|
|
||||||
let source = app_state.source.lock().unwrap();
|
let file: IndexFile = {
|
||||||
let versions: IndexFile =
|
let source = app_state.source.lock().await;
|
||||||
match source.read_file([scope, name_part], &app_state.project, None)? {
|
let repo = gix::open(source.path(&app_state.project))?;
|
||||||
|
let tree = root_tree(&repo)?;
|
||||||
|
|
||||||
|
match read_file(&tree, [scope, name_part])? {
|
||||||
Some(versions) => toml::de::from_str(&versions)?,
|
Some(versions) => toml::de::from_str(&versions)?,
|
||||||
None => return Ok(HttpResponse::NotFound().finish()),
|
None => return Ok(HttpResponse::NotFound().finish()),
|
||||||
};
|
}
|
||||||
|
};
|
||||||
|
|
||||||
let mut responses = BTreeMap::new();
|
let mut responses = BTreeMap::new();
|
||||||
|
|
||||||
for (v_id, entry) in versions {
|
for (v_id, entry) in file.entries {
|
||||||
let info = responses
|
let info = responses
|
||||||
.entry(v_id.version().clone())
|
.entry(v_id.version().clone())
|
||||||
.or_insert_with(|| PackageResponse {
|
.or_insert_with(|| PackageResponse {
|
||||||
|
|
|
@ -1,18 +1,3 @@
|
||||||
use actix_multipart::Multipart;
|
|
||||||
use actix_web::{web, HttpResponse, Responder};
|
|
||||||
use convert_case::{Case, Casing};
|
|
||||||
use flate2::read::GzDecoder;
|
|
||||||
use futures::{future::join_all, join, StreamExt};
|
|
||||||
use git2::{Remote, Repository, Signature};
|
|
||||||
use serde::Deserialize;
|
|
||||||
use sha2::{Digest, Sha256};
|
|
||||||
use std::{
|
|
||||||
collections::{BTreeSet, HashMap},
|
|
||||||
fs::read_dir,
|
|
||||||
io::{Cursor, Read, Write},
|
|
||||||
};
|
|
||||||
use tar::Archive;
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
auth::UserId,
|
auth::UserId,
|
||||||
benv,
|
benv,
|
||||||
|
@ -21,10 +6,16 @@ use crate::{
|
||||||
storage::StorageImpl,
|
storage::StorageImpl,
|
||||||
AppState,
|
AppState,
|
||||||
};
|
};
|
||||||
|
use actix_web::{web, web::Bytes, HttpResponse, Responder};
|
||||||
|
use async_compression::Level;
|
||||||
|
use convert_case::{Case, Casing};
|
||||||
|
use fs_err::tokio as fs;
|
||||||
|
use futures::{future::join_all, join};
|
||||||
|
use git2::{Remote, Repository, Signature};
|
||||||
use pesde::{
|
use pesde::{
|
||||||
manifest::Manifest,
|
manifest::Manifest,
|
||||||
source::{
|
source::{
|
||||||
git_index::GitBasedSource,
|
git_index::{read_file, root_tree, GitBasedSource},
|
||||||
pesde::{DocEntry, DocEntryKind, IndexFile, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
|
pesde::{DocEntry, DocEntryKind, IndexFile, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
|
||||||
specifiers::DependencySpecifiers,
|
specifiers::DependencySpecifiers,
|
||||||
version_id::VersionId,
|
version_id::VersionId,
|
||||||
|
@ -32,6 +23,14 @@ use pesde::{
|
||||||
},
|
},
|
||||||
MANIFEST_FILE_NAME,
|
MANIFEST_FILE_NAME,
|
||||||
};
|
};
|
||||||
|
use sentry::add_breadcrumb;
|
||||||
|
use serde::Deserialize;
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
use std::{
|
||||||
|
collections::{BTreeSet, HashMap},
|
||||||
|
io::{Cursor, Write},
|
||||||
|
};
|
||||||
|
use tokio::io::{AsyncReadExt, AsyncWriteExt};
|
||||||
|
|
||||||
fn signature<'a>() -> Signature<'a> {
|
fn signature<'a>() -> Signature<'a> {
|
||||||
Signature::now(
|
Signature::now(
|
||||||
|
@ -61,7 +60,7 @@ const ADDITIONAL_FORBIDDEN_FILES: &[&str] = &["default.project.json"];
|
||||||
struct DocEntryInfo {
|
struct DocEntryInfo {
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
label: Option<String>,
|
label: Option<String>,
|
||||||
#[serde(default)]
|
#[serde(default, alias = "position")]
|
||||||
sidebar_position: Option<usize>,
|
sidebar_position: Option<usize>,
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
collapsed: bool,
|
collapsed: bool,
|
||||||
|
@ -69,32 +68,20 @@ struct DocEntryInfo {
|
||||||
|
|
||||||
pub async fn publish_package(
|
pub async fn publish_package(
|
||||||
app_state: web::Data<AppState>,
|
app_state: web::Data<AppState>,
|
||||||
mut body: Multipart,
|
bytes: Bytes,
|
||||||
user_id: web::ReqData<UserId>,
|
user_id: web::ReqData<UserId>,
|
||||||
) -> Result<impl Responder, Error> {
|
) -> Result<impl Responder, Error> {
|
||||||
let max_archive_size = {
|
let source = app_state.source.lock().await;
|
||||||
let source = app_state.source.lock().unwrap();
|
source.refresh(&app_state.project).await.map_err(Box::new)?;
|
||||||
source.refresh(&app_state.project).map_err(Box::new)?;
|
let config = source.config(&app_state.project).await?;
|
||||||
source.config(&app_state.project)?.max_archive_size
|
|
||||||
};
|
|
||||||
|
|
||||||
let bytes = body
|
|
||||||
.next()
|
|
||||||
.await
|
|
||||||
.ok_or(Error::InvalidArchive)?
|
|
||||||
.map_err(|_| Error::InvalidArchive)?
|
|
||||||
.bytes(max_archive_size)
|
|
||||||
.await
|
|
||||||
.map_err(|_| Error::InvalidArchive)?
|
|
||||||
.map_err(|_| Error::InvalidArchive)?;
|
|
||||||
|
|
||||||
let package_dir = tempfile::tempdir()?;
|
let package_dir = tempfile::tempdir()?;
|
||||||
|
|
||||||
{
|
{
|
||||||
let mut decoder = GzDecoder::new(Cursor::new(&bytes));
|
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(Cursor::new(&bytes));
|
||||||
let mut archive = Archive::new(&mut decoder);
|
let mut archive = tokio_tar::Archive::new(&mut decoder);
|
||||||
|
|
||||||
archive.unpack(package_dir.path())?;
|
archive.unpack(package_dir.path()).await?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut manifest = None::<Manifest>;
|
let mut manifest = None::<Manifest>;
|
||||||
|
@ -102,39 +89,44 @@ pub async fn publish_package(
|
||||||
let mut docs = BTreeSet::new();
|
let mut docs = BTreeSet::new();
|
||||||
let mut docs_pages = HashMap::new();
|
let mut docs_pages = HashMap::new();
|
||||||
|
|
||||||
for entry in read_dir(package_dir.path())? {
|
let mut read_dir = fs::read_dir(package_dir.path()).await?;
|
||||||
let entry = entry?;
|
while let Some(entry) = read_dir.next_entry().await? {
|
||||||
let file_name = entry
|
let file_name = entry
|
||||||
.file_name()
|
.file_name()
|
||||||
.to_str()
|
.to_str()
|
||||||
.ok_or(Error::InvalidArchive)?
|
.ok_or_else(|| Error::InvalidArchive("file name contains non UTF-8 characters".into()))?
|
||||||
.to_string();
|
.to_string();
|
||||||
|
|
||||||
if entry.file_type()?.is_dir() {
|
if entry.file_type().await?.is_dir() {
|
||||||
if IGNORED_DIRS.contains(&file_name.as_str()) {
|
if IGNORED_DIRS.contains(&file_name.as_str()) {
|
||||||
return Err(Error::InvalidArchive);
|
return Err(Error::InvalidArchive(format!(
|
||||||
|
"archive contains forbidden directory: {file_name}"
|
||||||
|
)));
|
||||||
}
|
}
|
||||||
|
|
||||||
if file_name == "docs" {
|
if file_name == "docs" {
|
||||||
let mut stack = vec![(
|
let mut stack = vec![(
|
||||||
BTreeSet::new(),
|
BTreeSet::new(),
|
||||||
read_dir(entry.path())?,
|
fs::read_dir(entry.path()).await?,
|
||||||
None::<DocEntryInfo>,
|
None::<DocEntryInfo>,
|
||||||
)];
|
)];
|
||||||
|
|
||||||
'outer: while let Some((set, iter, category_info)) = stack.last_mut() {
|
'outer: while let Some((set, iter, category_info)) = stack.last_mut() {
|
||||||
for entry in iter {
|
while let Some(entry) = iter.next_entry().await? {
|
||||||
let entry = entry?;
|
|
||||||
let file_name = entry
|
let file_name = entry
|
||||||
.file_name()
|
.file_name()
|
||||||
.to_str()
|
.to_str()
|
||||||
.ok_or(Error::InvalidArchive)?
|
.ok_or_else(|| {
|
||||||
|
Error::InvalidArchive(
|
||||||
|
"file name contains non UTF-8 characters".into(),
|
||||||
|
)
|
||||||
|
})?
|
||||||
.to_string();
|
.to_string();
|
||||||
|
|
||||||
if entry.file_type()?.is_dir() {
|
if entry.file_type().await?.is_dir() {
|
||||||
stack.push((
|
stack.push((
|
||||||
BTreeSet::new(),
|
BTreeSet::new(),
|
||||||
read_dir(entry.path())?,
|
fs::read_dir(entry.path()).await?,
|
||||||
Some(DocEntryInfo {
|
Some(DocEntryInfo {
|
||||||
label: Some(file_name.to_case(Case::Title)),
|
label: Some(file_name.to_case(Case::Title)),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
|
@ -144,7 +136,7 @@ pub async fn publish_package(
|
||||||
}
|
}
|
||||||
|
|
||||||
if file_name == "_category_.json" {
|
if file_name == "_category_.json" {
|
||||||
let info = std::fs::read_to_string(entry.path())?;
|
let info = fs::read_to_string(entry.path()).await?;
|
||||||
let mut info: DocEntryInfo = serde_json::from_str(&info)?;
|
let mut info: DocEntryInfo = serde_json::from_str(&info)?;
|
||||||
let old_info = category_info.take();
|
let old_info = category_info.take();
|
||||||
info.label = info.label.or(old_info.and_then(|i| i.label));
|
info.label = info.label.or(old_info.and_then(|i| i.label));
|
||||||
|
@ -156,16 +148,16 @@ pub async fn publish_package(
|
||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
let content = std::fs::read_to_string(entry.path())?;
|
let content = fs::read_to_string(entry.path()).await?;
|
||||||
let content = content.trim();
|
let content = content.trim();
|
||||||
let hash = format!("{:x}", Sha256::digest(content.as_bytes()));
|
let hash = format!("{:x}", Sha256::digest(content.as_bytes()));
|
||||||
|
|
||||||
let mut gz = flate2::read::GzEncoder::new(
|
let mut gz = async_compression::tokio::bufread::GzipEncoder::with_quality(
|
||||||
Cursor::new(content.as_bytes().to_vec()),
|
Cursor::new(content.as_bytes().to_vec()),
|
||||||
flate2::Compression::best(),
|
Level::Best,
|
||||||
);
|
);
|
||||||
let mut bytes = vec![];
|
let mut bytes = vec![];
|
||||||
gz.read_to_end(&mut bytes)?;
|
gz.read_to_end(&mut bytes).await?;
|
||||||
docs_pages.insert(hash.to_string(), bytes);
|
docs_pages.insert(hash.to_string(), bytes);
|
||||||
|
|
||||||
let mut lines = content.lines().peekable();
|
let mut lines = content.lines().peekable();
|
||||||
|
@ -190,8 +182,12 @@ pub async fn publish_package(
|
||||||
.and_then(|l| l.strip_prefix("# "))
|
.and_then(|l| l.strip_prefix("# "))
|
||||||
.map(|s| s.to_string());
|
.map(|s| s.to_string());
|
||||||
|
|
||||||
let info: DocEntryInfo = serde_yaml::from_str(&front_matter)
|
let info: DocEntryInfo =
|
||||||
.map_err(|_| Error::InvalidArchive)?;
|
serde_yaml::from_str(&front_matter).map_err(|_| {
|
||||||
|
Error::InvalidArchive(format!(
|
||||||
|
"doc {file_name}'s frontmatter isn't valid YAML"
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
set.insert(DocEntry {
|
set.insert(DocEntry {
|
||||||
label: info.label.or(h1).unwrap_or(file_name.to_case(Case::Title)),
|
label: info.label.or(h1).unwrap_or(file_name.to_case(Case::Title)),
|
||||||
|
@ -203,7 +199,11 @@ pub async fn publish_package(
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.with_extension("")
|
.with_extension("")
|
||||||
.to_str()
|
.to_str()
|
||||||
.ok_or(Error::InvalidArchive)?
|
.ok_or_else(|| {
|
||||||
|
Error::InvalidArchive(
|
||||||
|
"file name contains non UTF-8 characters".into(),
|
||||||
|
)
|
||||||
|
})?
|
||||||
// ensure that the path is always using forward slashes
|
// ensure that the path is always using forward slashes
|
||||||
.replace("\\", "/"),
|
.replace("\\", "/"),
|
||||||
hash,
|
hash,
|
||||||
|
@ -237,16 +237,16 @@ pub async fn publish_package(
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if IGNORED_FILES.contains(&file_name.as_str()) {
|
if IGNORED_FILES.contains(&file_name.as_str())
|
||||||
return Err(Error::InvalidArchive);
|
|| ADDITIONAL_FORBIDDEN_FILES.contains(&file_name.as_str())
|
||||||
}
|
{
|
||||||
|
return Err(Error::InvalidArchive(format!(
|
||||||
if ADDITIONAL_FORBIDDEN_FILES.contains(&file_name.as_str()) {
|
"archive contains forbidden file: {file_name}"
|
||||||
return Err(Error::InvalidArchive);
|
)));
|
||||||
}
|
}
|
||||||
|
|
||||||
if file_name == MANIFEST_FILE_NAME {
|
if file_name == MANIFEST_FILE_NAME {
|
||||||
let content = std::fs::read_to_string(entry.path())?;
|
let content = fs::read_to_string(entry.path()).await?;
|
||||||
|
|
||||||
manifest = Some(toml::de::from_str(&content)?);
|
manifest = Some(toml::de::from_str(&content)?);
|
||||||
} else if file_name
|
} else if file_name
|
||||||
|
@ -256,30 +256,44 @@ pub async fn publish_package(
|
||||||
.is_some()
|
.is_some()
|
||||||
{
|
{
|
||||||
if readme.is_some() {
|
if readme.is_some() {
|
||||||
return Err(Error::InvalidArchive);
|
return Err(Error::InvalidArchive(
|
||||||
|
"archive contains multiple readme files".into(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
let file = std::fs::File::open(entry.path())?;
|
let mut file = fs::File::open(entry.path()).await?;
|
||||||
|
|
||||||
let mut gz = flate2::read::GzEncoder::new(file, flate2::Compression::best());
|
let mut gz = async_compression::tokio::write::GzipEncoder::new(vec![]);
|
||||||
let mut bytes = vec![];
|
tokio::io::copy(&mut file, &mut gz).await?;
|
||||||
gz.read_to_end(&mut bytes)?;
|
gz.shutdown().await?;
|
||||||
readme = Some(bytes);
|
readme = Some(gz.into_inner());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let Some(manifest) = manifest else {
|
let Some(manifest) = manifest else {
|
||||||
return Err(Error::InvalidArchive);
|
return Err(Error::InvalidArchive(
|
||||||
|
"archive doesn't contain a manifest".into(),
|
||||||
|
));
|
||||||
};
|
};
|
||||||
|
|
||||||
{
|
add_breadcrumb(sentry::Breadcrumb {
|
||||||
let source = app_state.source.lock().unwrap();
|
category: Some("publish".into()),
|
||||||
source.refresh(&app_state.project).map_err(Box::new)?;
|
message: Some(format!(
|
||||||
let config = source.config(&app_state.project)?;
|
"publish request for {}@{} {}. has readme: {}. docs: {}",
|
||||||
|
manifest.name,
|
||||||
|
manifest.version,
|
||||||
|
manifest.target,
|
||||||
|
readme.is_some(),
|
||||||
|
docs_pages.len()
|
||||||
|
)),
|
||||||
|
level: sentry::Level::Info,
|
||||||
|
..Default::default()
|
||||||
|
});
|
||||||
|
|
||||||
let dependencies = manifest
|
{
|
||||||
.all_dependencies()
|
let dependencies = manifest.all_dependencies().map_err(|e| {
|
||||||
.map_err(|_| Error::InvalidArchive)?;
|
Error::InvalidArchive(format!("manifest has invalid dependencies: {e}"))
|
||||||
|
})?;
|
||||||
|
|
||||||
for (specifier, _) in dependencies.values() {
|
for (specifier, _) in dependencies.values() {
|
||||||
match specifier {
|
match specifier {
|
||||||
|
@ -288,47 +302,58 @@ pub async fn publish_package(
|
||||||
.index
|
.index
|
||||||
.as_deref()
|
.as_deref()
|
||||||
.filter(|index| match gix::Url::try_from(*index) {
|
.filter(|index| match gix::Url::try_from(*index) {
|
||||||
Ok(_) if config.other_registries_allowed => true,
|
Ok(url) => config
|
||||||
Ok(url) => url == *source.repo_url(),
|
.other_registries_allowed
|
||||||
|
.is_allowed_or_same(source.repo_url().clone(), url),
|
||||||
Err(_) => false,
|
Err(_) => false,
|
||||||
})
|
})
|
||||||
.is_none()
|
.is_none()
|
||||||
{
|
{
|
||||||
return Err(Error::InvalidArchive);
|
return Err(Error::InvalidArchive(format!(
|
||||||
|
"invalid index in pesde dependency {specifier}"
|
||||||
|
)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
DependencySpecifiers::Wally(specifier) => {
|
DependencySpecifiers::Wally(specifier) => {
|
||||||
if !config.wally_allowed {
|
|
||||||
return Err(Error::InvalidArchive);
|
|
||||||
}
|
|
||||||
|
|
||||||
if specifier
|
if specifier
|
||||||
.index
|
.index
|
||||||
.as_ref()
|
.as_deref()
|
||||||
.filter(|index| index.parse::<url::Url>().is_ok())
|
.filter(|index| match gix::Url::try_from(*index) {
|
||||||
|
Ok(url) => config.wally_allowed.is_allowed(url),
|
||||||
|
Err(_) => false,
|
||||||
|
})
|
||||||
.is_none()
|
.is_none()
|
||||||
{
|
{
|
||||||
return Err(Error::InvalidArchive);
|
return Err(Error::InvalidArchive(format!(
|
||||||
|
"invalid index in wally dependency {specifier}"
|
||||||
|
)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
DependencySpecifiers::Git(_) => {
|
DependencySpecifiers::Git(specifier) => {
|
||||||
if !config.git_allowed {
|
if !config.git_allowed.is_allowed(specifier.repo.clone()) {
|
||||||
return Err(Error::InvalidArchive);
|
return Err(Error::InvalidArchive(
|
||||||
|
"git dependencies are not allowed".into(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
DependencySpecifiers::Workspace(_) => {
|
DependencySpecifiers::Workspace(_) => {
|
||||||
// workspace specifiers are to be transformed into Pesde specifiers by the sender
|
// workspace specifiers are to be transformed into pesde specifiers by the sender
|
||||||
return Err(Error::InvalidArchive);
|
return Err(Error::InvalidArchive(
|
||||||
|
"non-transformed workspace dependency".into(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let repo = source.repo_git2(&app_state.project)?;
|
let repo = Repository::open_bare(source.path(&app_state.project))?;
|
||||||
|
let gix_repo = gix::open(repo.path())?;
|
||||||
|
|
||||||
|
let gix_tree = root_tree(&gix_repo)?;
|
||||||
|
|
||||||
let (scope, name) = manifest.name.as_str();
|
let (scope, name) = manifest.name.as_str();
|
||||||
let mut oids = vec![];
|
let mut oids = vec![];
|
||||||
|
|
||||||
match source.read_file([scope, SCOPE_INFO_FILE], &app_state.project, None)? {
|
match read_file(&gix_tree, [scope, SCOPE_INFO_FILE])? {
|
||||||
Some(info) => {
|
Some(info) => {
|
||||||
let info: ScopeInfo = toml::de::from_str(&info)?;
|
let info: ScopeInfo = toml::de::from_str(&info)?;
|
||||||
if !info.owners.contains(&user_id.0) {
|
if !info.owners.contains(&user_id.0) {
|
||||||
|
@ -346,11 +371,8 @@ pub async fn publish_package(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut entries: IndexFile = toml::de::from_str(
|
let mut file: IndexFile =
|
||||||
&source
|
toml::de::from_str(&read_file(&gix_tree, [scope, name])?.unwrap_or_default())?;
|
||||||
.read_file([scope, name], &app_state.project, None)?
|
|
||||||
.unwrap_or_default(),
|
|
||||||
)?;
|
|
||||||
|
|
||||||
let new_entry = IndexFileEntry {
|
let new_entry = IndexFileEntry {
|
||||||
target: manifest.target.clone(),
|
target: manifest.target.clone(),
|
||||||
|
@ -364,11 +386,12 @@ pub async fn publish_package(
|
||||||
dependencies,
|
dependencies,
|
||||||
};
|
};
|
||||||
|
|
||||||
let this_version = entries
|
let this_version = file
|
||||||
|
.entries
|
||||||
.keys()
|
.keys()
|
||||||
.find(|v_id| *v_id.version() == manifest.version);
|
.find(|v_id| *v_id.version() == manifest.version);
|
||||||
if let Some(this_version) = this_version {
|
if let Some(this_version) = this_version {
|
||||||
let other_entry = entries.get(this_version).unwrap();
|
let other_entry = file.entries.get(this_version).unwrap();
|
||||||
|
|
||||||
// description cannot be different - which one to render in the "Recently published" list?
|
// description cannot be different - which one to render in the "Recently published" list?
|
||||||
// the others cannot be different because what to return from the versions endpoint?
|
// the others cannot be different because what to return from the versions endpoint?
|
||||||
|
@ -384,7 +407,8 @@ pub async fn publish_package(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if entries
|
if file
|
||||||
|
.entries
|
||||||
.insert(
|
.insert(
|
||||||
VersionId::new(manifest.version.clone(), manifest.target.kind()),
|
VersionId::new(manifest.version.clone(), manifest.target.kind()),
|
||||||
new_entry.clone(),
|
new_entry.clone(),
|
||||||
|
@ -400,7 +424,7 @@ pub async fn publish_package(
|
||||||
let reference = repo.find_reference(&refspec)?;
|
let reference = repo.find_reference(&refspec)?;
|
||||||
|
|
||||||
{
|
{
|
||||||
let index_content = toml::to_string(&entries)?;
|
let index_content = toml::to_string(&file)?;
|
||||||
let mut blob_writer = repo.blob_writer(None)?;
|
let mut blob_writer = repo.blob_writer(None)?;
|
||||||
blob_writer.write_all(index_content.as_bytes())?;
|
blob_writer.write_all(index_content.as_bytes())?;
|
||||||
oids.push((name, blob_writer.commit()?));
|
oids.push((name, blob_writer.commit()?));
|
||||||
|
@ -455,7 +479,7 @@ pub async fn publish_package(
|
||||||
let (a, b, c) = join!(
|
let (a, b, c) = join!(
|
||||||
app_state
|
app_state
|
||||||
.storage
|
.storage
|
||||||
.store_package(&manifest.name, &version_id, bytes.to_vec(),),
|
.store_package(&manifest.name, &version_id, bytes.to_vec()),
|
||||||
join_all(
|
join_all(
|
||||||
docs_pages
|
docs_pages
|
||||||
.into_iter()
|
.into_iter()
|
||||||
|
|
|
@ -4,13 +4,15 @@ use actix_web::{web, HttpResponse, Responder};
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use tantivy::{collector::Count, query::AllQuery, schema::Value, DateTime, Order};
|
use tantivy::{collector::Count, query::AllQuery, schema::Value, DateTime, Order};
|
||||||
|
|
||||||
|
use crate::{error::Error, package::PackageResponse, AppState};
|
||||||
use pesde::{
|
use pesde::{
|
||||||
names::PackageName,
|
names::PackageName,
|
||||||
source::{git_index::GitBasedSource, pesde::IndexFile},
|
source::{
|
||||||
|
git_index::{read_file, root_tree, GitBasedSource},
|
||||||
|
pesde::IndexFile,
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{error::Error, package::PackageResponse, AppState};
|
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
pub struct Request {
|
pub struct Request {
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
|
@ -28,23 +30,12 @@ pub async fn search_packages(
|
||||||
|
|
||||||
let id = schema.get_field("id").unwrap();
|
let id = schema.get_field("id").unwrap();
|
||||||
|
|
||||||
let scope = schema.get_field("scope").unwrap();
|
|
||||||
let name = schema.get_field("name").unwrap();
|
|
||||||
let description = schema.get_field("description").unwrap();
|
|
||||||
|
|
||||||
let query = request.query.as_deref().unwrap_or_default().trim();
|
let query = request.query.as_deref().unwrap_or_default().trim();
|
||||||
|
|
||||||
let query = if query.is_empty() {
|
let query = if query.is_empty() {
|
||||||
Box::new(AllQuery)
|
Box::new(AllQuery)
|
||||||
} else {
|
} else {
|
||||||
let mut query_parser = tantivy::query::QueryParser::for_index(
|
app_state.query_parser.parse_query(query)?
|
||||||
searcher.index(),
|
|
||||||
vec![scope, name, description],
|
|
||||||
);
|
|
||||||
query_parser.set_field_boost(scope, 2.0);
|
|
||||||
query_parser.set_field_boost(name, 3.5);
|
|
||||||
|
|
||||||
query_parser.parse_query(query)?
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let (count, top_docs) = searcher
|
let (count, top_docs) = searcher
|
||||||
|
@ -59,7 +50,9 @@ pub async fn search_packages(
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let source = app_state.source.lock().unwrap();
|
let source = app_state.source.lock().await;
|
||||||
|
let repo = gix::open(source.path(&app_state.project))?;
|
||||||
|
let tree = root_tree(&repo)?;
|
||||||
|
|
||||||
let top_docs = top_docs
|
let top_docs = top_docs
|
||||||
.into_iter()
|
.into_iter()
|
||||||
|
@ -75,15 +68,11 @@ pub async fn search_packages(
|
||||||
.unwrap();
|
.unwrap();
|
||||||
let (scope, name) = id.as_str();
|
let (scope, name) = id.as_str();
|
||||||
|
|
||||||
let versions: IndexFile = toml::de::from_str(
|
let file: IndexFile =
|
||||||
&source
|
toml::de::from_str(&read_file(&tree, [scope, name]).unwrap().unwrap()).unwrap();
|
||||||
.read_file([scope, name], &app_state.project, None)
|
|
||||||
.unwrap()
|
|
||||||
.unwrap(),
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let (latest_version, entry) = versions
|
let (latest_version, entry) = file
|
||||||
|
.entries
|
||||||
.iter()
|
.iter()
|
||||||
.max_by_key(|(v_id, _)| v_id.version())
|
.max_by_key(|(v_id, _)| v_id.version())
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
@ -91,17 +80,19 @@ pub async fn search_packages(
|
||||||
PackageResponse {
|
PackageResponse {
|
||||||
name: id.to_string(),
|
name: id.to_string(),
|
||||||
version: latest_version.version().to_string(),
|
version: latest_version.version().to_string(),
|
||||||
targets: versions
|
targets: file
|
||||||
|
.entries
|
||||||
.iter()
|
.iter()
|
||||||
.filter(|(v_id, _)| v_id.version() == latest_version.version())
|
.filter(|(v_id, _)| v_id.version() == latest_version.version())
|
||||||
.map(|(_, entry)| (&entry.target).into())
|
.map(|(_, entry)| (&entry.target).into())
|
||||||
.collect(),
|
.collect(),
|
||||||
description: entry.description.clone().unwrap_or_default(),
|
description: entry.description.clone().unwrap_or_default(),
|
||||||
published_at: versions
|
published_at: file
|
||||||
|
.entries
|
||||||
.values()
|
.values()
|
||||||
.max_by_key(|entry| entry.published_at)
|
.map(|entry| entry.published_at)
|
||||||
.unwrap()
|
.max()
|
||||||
.published_at,
|
.unwrap(),
|
||||||
license: entry.license.clone().unwrap_or_default(),
|
license: entry.license.clone().unwrap_or_default(),
|
||||||
authors: entry.authors.clone(),
|
authors: entry.authors.clone(),
|
||||||
repository: entry.repository.clone().map(|url| url.to_string()),
|
repository: entry.repository.clone().map(|url| url.to_string()),
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
use actix_web::{body::BoxBody, HttpResponse, ResponseError};
|
use actix_web::{body::BoxBody, HttpResponse, ResponseError};
|
||||||
use log::error;
|
use pesde::source::git_index::errors::{ReadFile, RefreshError, TreeError};
|
||||||
use pesde::source::git_index::errors::{ReadFile, RefreshError};
|
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
|
@ -15,6 +14,9 @@ pub enum Error {
|
||||||
#[error("error deserializing file")]
|
#[error("error deserializing file")]
|
||||||
Deserialize(#[from] toml::de::Error),
|
Deserialize(#[from] toml::de::Error),
|
||||||
|
|
||||||
|
#[error("failed to send request: {1}\nserver response: {0}")]
|
||||||
|
ReqwestResponse(String, #[source] reqwest::Error),
|
||||||
|
|
||||||
#[error("error sending request")]
|
#[error("error sending request")]
|
||||||
Reqwest(#[from] reqwest::Error),
|
Reqwest(#[from] reqwest::Error),
|
||||||
|
|
||||||
|
@ -22,7 +24,7 @@ pub enum Error {
|
||||||
Tar(#[from] std::io::Error),
|
Tar(#[from] std::io::Error),
|
||||||
|
|
||||||
#[error("invalid archive")]
|
#[error("invalid archive")]
|
||||||
InvalidArchive,
|
InvalidArchive(String),
|
||||||
|
|
||||||
#[error("failed to read index config")]
|
#[error("failed to read index config")]
|
||||||
Config(#[from] pesde::source::pesde::errors::ConfigError),
|
Config(#[from] pesde::source::pesde::errors::ConfigError),
|
||||||
|
@ -38,6 +40,12 @@ pub enum Error {
|
||||||
|
|
||||||
#[error("failed to serialize struct")]
|
#[error("failed to serialize struct")]
|
||||||
SerializeJson(#[from] serde_json::Error),
|
SerializeJson(#[from] serde_json::Error),
|
||||||
|
|
||||||
|
#[error("failed to open git repo")]
|
||||||
|
OpenRepo(#[from] gix::open::Error),
|
||||||
|
|
||||||
|
#[error("failed to get root tree")]
|
||||||
|
RootTree(#[from] TreeError),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize)]
|
#[derive(Debug, Serialize)]
|
||||||
|
@ -51,13 +59,31 @@ impl ResponseError for Error {
|
||||||
Error::Query(e) => HttpResponse::BadRequest().json(ErrorResponse {
|
Error::Query(e) => HttpResponse::BadRequest().json(ErrorResponse {
|
||||||
error: format!("failed to parse query: {e}"),
|
error: format!("failed to parse query: {e}"),
|
||||||
}),
|
}),
|
||||||
Error::Tar(_) | Error::InvalidArchive => HttpResponse::BadRequest().json(ErrorResponse {
|
Error::Tar(_) => HttpResponse::BadRequest().json(ErrorResponse {
|
||||||
error: "invalid archive. ensure it has all the required files, and all the dependencies exist in the registry.".to_string(),
|
error: "corrupt archive".to_string(),
|
||||||
|
}),
|
||||||
|
Error::InvalidArchive(e) => HttpResponse::BadRequest().json(ErrorResponse {
|
||||||
|
error: format!("archive is invalid: {e}"),
|
||||||
}),
|
}),
|
||||||
e => {
|
e => {
|
||||||
log::error!("unhandled error: {e:?}");
|
tracing::error!("unhandled error: {e:?}");
|
||||||
HttpResponse::InternalServerError().finish()
|
HttpResponse::InternalServerError().finish()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub trait ReqwestErrorExt {
|
||||||
|
async fn into_error(self) -> Result<Self, Error>
|
||||||
|
where
|
||||||
|
Self: Sized;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ReqwestErrorExt for reqwest::Response {
|
||||||
|
async fn into_error(self) -> Result<Self, Error> {
|
||||||
|
match self.error_for_status_ref() {
|
||||||
|
Ok(_) => Ok(self),
|
||||||
|
Err(e) => Err(Error::ReqwestResponse(self.text().await?, e)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -1,23 +1,27 @@
|
||||||
use actix_cors::Cors;
|
|
||||||
use actix_governor::{Governor, GovernorConfigBuilder};
|
|
||||||
use actix_web::{
|
|
||||||
middleware::{from_fn, Compress, Condition, Logger, NormalizePath, TrailingSlash},
|
|
||||||
rt::System,
|
|
||||||
web, App, HttpServer,
|
|
||||||
};
|
|
||||||
use log::info;
|
|
||||||
use std::{env::current_dir, fs::create_dir_all, path::PathBuf, sync::Mutex};
|
|
||||||
|
|
||||||
use pesde::{
|
|
||||||
source::{pesde::PesdePackageSource, traits::PackageSource},
|
|
||||||
AuthConfig, Project,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
auth::{get_auth_from_env, Auth, UserIdExtractor},
|
auth::{get_auth_from_env, Auth, UserIdExtractor},
|
||||||
search::make_search,
|
search::make_search,
|
||||||
storage::{get_storage_from_env, Storage},
|
storage::{get_storage_from_env, Storage},
|
||||||
};
|
};
|
||||||
|
use actix_cors::Cors;
|
||||||
|
use actix_governor::{Governor, GovernorConfigBuilder};
|
||||||
|
use actix_web::{
|
||||||
|
middleware::{from_fn, Compress, NormalizePath, TrailingSlash},
|
||||||
|
rt::System,
|
||||||
|
web,
|
||||||
|
web::PayloadConfig,
|
||||||
|
App, HttpServer,
|
||||||
|
};
|
||||||
|
use fs_err::tokio as fs;
|
||||||
|
use pesde::{
|
||||||
|
source::{pesde::PesdePackageSource, traits::PackageSource},
|
||||||
|
AuthConfig, Project,
|
||||||
|
};
|
||||||
|
use std::{env::current_dir, path::PathBuf};
|
||||||
|
use tracing::level_filters::LevelFilter;
|
||||||
|
use tracing_subscriber::{
|
||||||
|
fmt::format::FmtSpan, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter,
|
||||||
|
};
|
||||||
|
|
||||||
mod auth;
|
mod auth;
|
||||||
mod endpoints;
|
mod endpoints;
|
||||||
|
@ -38,13 +42,14 @@ pub fn make_reqwest() -> reqwest::Client {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct AppState {
|
pub struct AppState {
|
||||||
pub source: Mutex<PesdePackageSource>,
|
pub source: tokio::sync::Mutex<PesdePackageSource>,
|
||||||
pub project: Project,
|
pub project: Project,
|
||||||
pub storage: Storage,
|
pub storage: Storage,
|
||||||
pub auth: Auth,
|
pub auth: Auth,
|
||||||
|
|
||||||
pub search_reader: tantivy::IndexReader,
|
pub search_reader: tantivy::IndexReader,
|
||||||
pub search_writer: Mutex<tantivy::IndexWriter>,
|
pub search_writer: std::sync::Mutex<tantivy::IndexWriter>,
|
||||||
|
pub query_parser: tantivy::query::QueryParser,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
|
@ -80,13 +85,14 @@ macro_rules! benv {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn run(with_sentry: bool) -> std::io::Result<()> {
|
async fn run() -> std::io::Result<()> {
|
||||||
let address = benv!("ADDRESS" => "127.0.0.1");
|
let address = benv!("ADDRESS" => "127.0.0.1");
|
||||||
let port: u16 = benv!(parse "PORT" => "8080");
|
let port: u16 = benv!(parse "PORT" => "8080");
|
||||||
|
|
||||||
let cwd = current_dir().unwrap();
|
let cwd = current_dir().unwrap();
|
||||||
let data_dir = cwd.join("data");
|
let data_dir =
|
||||||
create_dir_all(&data_dir).unwrap();
|
PathBuf::from(benv!("DATA_DIR" => "{CWD}/data").replace("{CWD}", cwd.to_str().unwrap()));
|
||||||
|
fs::create_dir_all(&data_dir).await.unwrap();
|
||||||
|
|
||||||
let project = Project::new(
|
let project = Project::new(
|
||||||
&cwd,
|
&cwd,
|
||||||
|
@ -94,31 +100,39 @@ async fn run(with_sentry: bool) -> std::io::Result<()> {
|
||||||
data_dir.join("project"),
|
data_dir.join("project"),
|
||||||
&cwd,
|
&cwd,
|
||||||
AuthConfig::new().with_git_credentials(Some(gix::sec::identity::Account {
|
AuthConfig::new().with_git_credentials(Some(gix::sec::identity::Account {
|
||||||
username: benv!(required "GITHUB_USERNAME"),
|
username: benv!(required "GIT_USERNAME"),
|
||||||
password: benv!(required "GITHUB_PAT"),
|
password: benv!(required "GIT_PASSWORD"),
|
||||||
})),
|
})),
|
||||||
);
|
);
|
||||||
let source = PesdePackageSource::new(benv!(required "INDEX_REPO_URL").try_into().unwrap());
|
let source = PesdePackageSource::new(benv!(required "INDEX_REPO_URL").try_into().unwrap());
|
||||||
source.refresh(&project).expect("failed to refresh source");
|
source
|
||||||
|
.refresh(&project)
|
||||||
|
.await
|
||||||
|
.expect("failed to refresh source");
|
||||||
|
let config = source
|
||||||
|
.config(&project)
|
||||||
|
.await
|
||||||
|
.expect("failed to get index config");
|
||||||
|
|
||||||
let (search_reader, search_writer) = make_search(&project, &source);
|
let (search_reader, search_writer, query_parser) = make_search(&project, &source).await;
|
||||||
|
|
||||||
let app_data = web::Data::new(AppState {
|
let app_data = web::Data::new(AppState {
|
||||||
source: Mutex::new(source),
|
|
||||||
project,
|
|
||||||
storage: {
|
storage: {
|
||||||
let storage = get_storage_from_env();
|
let storage = get_storage_from_env();
|
||||||
info!("storage: {storage}");
|
tracing::info!("storage: {storage}");
|
||||||
storage
|
storage
|
||||||
},
|
},
|
||||||
auth: {
|
auth: {
|
||||||
let auth = get_auth_from_env();
|
let auth = get_auth_from_env(&config);
|
||||||
info!("auth: {auth}");
|
tracing::info!("auth: {auth}");
|
||||||
auth
|
auth
|
||||||
},
|
},
|
||||||
|
source: tokio::sync::Mutex::new(source),
|
||||||
|
project,
|
||||||
|
|
||||||
search_reader,
|
search_reader,
|
||||||
search_writer: Mutex::new(search_writer),
|
search_writer: std::sync::Mutex::new(search_writer),
|
||||||
|
query_parser,
|
||||||
});
|
});
|
||||||
|
|
||||||
let publish_governor_config = GovernorConfigBuilder::default()
|
let publish_governor_config = GovernorConfigBuilder::default()
|
||||||
|
@ -129,14 +143,12 @@ async fn run(with_sentry: bool) -> std::io::Result<()> {
|
||||||
.finish()
|
.finish()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
info!("listening on {address}:{port}");
|
|
||||||
|
|
||||||
HttpServer::new(move || {
|
HttpServer::new(move || {
|
||||||
App::new()
|
App::new()
|
||||||
.wrap(Condition::new(with_sentry, sentry_actix::Sentry::new()))
|
.wrap(sentry_actix::Sentry::with_transaction())
|
||||||
.wrap(NormalizePath::new(TrailingSlash::Trim))
|
.wrap(NormalizePath::new(TrailingSlash::Trim))
|
||||||
.wrap(Cors::permissive())
|
.wrap(Cors::permissive())
|
||||||
.wrap(Logger::default())
|
.wrap(tracing_actix_web::TracingLogger::default())
|
||||||
.wrap(Compress::default())
|
.wrap(Compress::default())
|
||||||
.app_data(app_data.clone())
|
.app_data(app_data.clone())
|
||||||
.route(
|
.route(
|
||||||
|
@ -165,12 +177,16 @@ async fn run(with_sentry: bool) -> std::io::Result<()> {
|
||||||
.to(endpoints::package_version::get_package_version)
|
.to(endpoints::package_version::get_package_version)
|
||||||
.wrap(from_fn(auth::read_mw)),
|
.wrap(from_fn(auth::read_mw)),
|
||||||
)
|
)
|
||||||
.route(
|
.service(
|
||||||
"/packages",
|
web::scope("/packages")
|
||||||
web::post()
|
.app_data(PayloadConfig::new(config.max_archive_size))
|
||||||
.to(endpoints::publish_version::publish_package)
|
.route(
|
||||||
.wrap(Governor::new(&publish_governor_config))
|
"",
|
||||||
.wrap(from_fn(auth::write_mw)),
|
web::post()
|
||||||
|
.to(endpoints::publish_version::publish_package)
|
||||||
|
.wrap(Governor::new(&publish_governor_config))
|
||||||
|
.wrap(from_fn(auth::write_mw)),
|
||||||
|
),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
@ -185,33 +201,42 @@ async fn run(with_sentry: bool) -> std::io::Result<()> {
|
||||||
fn main() -> std::io::Result<()> {
|
fn main() -> std::io::Result<()> {
|
||||||
let _ = dotenvy::dotenv();
|
let _ = dotenvy::dotenv();
|
||||||
|
|
||||||
let sentry_url = benv!("SENTRY_URL").ok();
|
let tracing_env_filter = EnvFilter::builder()
|
||||||
let with_sentry = sentry_url.is_some();
|
.with_default_directive(LevelFilter::INFO.into())
|
||||||
|
.from_env_lossy()
|
||||||
|
.add_directive("reqwest=info".parse().unwrap())
|
||||||
|
.add_directive("rustls=info".parse().unwrap())
|
||||||
|
.add_directive("tokio_util=info".parse().unwrap())
|
||||||
|
.add_directive("goblin=info".parse().unwrap())
|
||||||
|
.add_directive("tower=info".parse().unwrap())
|
||||||
|
.add_directive("hyper=info".parse().unwrap())
|
||||||
|
.add_directive("h2=info".parse().unwrap());
|
||||||
|
|
||||||
let mut log_builder = pretty_env_logger::formatted_builder();
|
tracing_subscriber::registry()
|
||||||
log_builder.parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"));
|
.with(tracing_env_filter)
|
||||||
|
.with(
|
||||||
|
tracing_subscriber::fmt::layer()
|
||||||
|
.compact()
|
||||||
|
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE),
|
||||||
|
)
|
||||||
|
.with(sentry::integrations::tracing::layer())
|
||||||
|
.init();
|
||||||
|
|
||||||
if with_sentry {
|
let guard = sentry::init(sentry::ClientOptions {
|
||||||
let logger = sentry_log::SentryLogger::with_dest(log_builder.build());
|
release: sentry::release_name!(),
|
||||||
log::set_boxed_logger(Box::new(logger)).unwrap();
|
dsn: benv!(parse "SENTRY_DSN").ok(),
|
||||||
log::set_max_level(log::LevelFilter::Info);
|
session_mode: sentry::SessionMode::Request,
|
||||||
|
traces_sample_rate: 1.0,
|
||||||
|
debug: true,
|
||||||
|
..Default::default()
|
||||||
|
});
|
||||||
|
|
||||||
|
if guard.is_enabled() {
|
||||||
|
std::env::set_var("RUST_BACKTRACE", "full");
|
||||||
|
tracing::info!("sentry initialized");
|
||||||
} else {
|
} else {
|
||||||
log_builder.try_init().unwrap();
|
tracing::info!("sentry **NOT** initialized");
|
||||||
}
|
}
|
||||||
|
|
||||||
let _guard = if let Some(sentry_url) = sentry_url {
|
System::new().block_on(run())
|
||||||
std::env::set_var("RUST_BACKTRACE", "1");
|
|
||||||
|
|
||||||
Some(sentry::init((
|
|
||||||
sentry_url,
|
|
||||||
sentry::ClientOptions {
|
|
||||||
release: sentry::release_name!(),
|
|
||||||
..Default::default()
|
|
||||||
},
|
|
||||||
)))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
System::new().block_on(run(with_sentry))
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -8,6 +8,8 @@ pub struct TargetInfo {
|
||||||
kind: TargetKind,
|
kind: TargetKind,
|
||||||
lib: bool,
|
lib: bool,
|
||||||
bin: bool,
|
bin: bool,
|
||||||
|
#[serde(skip_serializing_if = "BTreeSet::is_empty")]
|
||||||
|
scripts: BTreeSet<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Target> for TargetInfo {
|
impl From<Target> for TargetInfo {
|
||||||
|
@ -22,6 +24,10 @@ impl From<&Target> for TargetInfo {
|
||||||
kind: target.kind(),
|
kind: target.kind(),
|
||||||
lib: target.lib_path().is_some(),
|
lib: target.lib_path().is_some(),
|
||||||
bin: target.bin_path().is_some(),
|
bin: target.bin_path().is_some(),
|
||||||
|
scripts: target
|
||||||
|
.scripts()
|
||||||
|
.map(|scripts| scripts.keys().cloned().collect())
|
||||||
|
.unwrap_or_default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,16 +1,76 @@
|
||||||
use crate::AppState;
|
use crate::AppState;
|
||||||
|
use async_stream::stream;
|
||||||
|
use futures::{Stream, StreamExt};
|
||||||
use pesde::{
|
use pesde::{
|
||||||
names::PackageName,
|
names::PackageName,
|
||||||
source::pesde::{IndexFileEntry, PesdePackageSource},
|
source::{
|
||||||
|
git_index::{root_tree, GitBasedSource},
|
||||||
|
pesde::{IndexFile, IndexFileEntry, PesdePackageSource, SCOPE_INFO_FILE},
|
||||||
|
},
|
||||||
Project,
|
Project,
|
||||||
};
|
};
|
||||||
use tantivy::{
|
use tantivy::{
|
||||||
doc,
|
doc,
|
||||||
|
query::QueryParser,
|
||||||
schema::{IndexRecordOption, TextFieldIndexing, TextOptions, FAST, STORED, STRING},
|
schema::{IndexRecordOption, TextFieldIndexing, TextOptions, FAST, STORED, STRING},
|
||||||
|
tokenizer::TextAnalyzer,
|
||||||
DateTime, IndexReader, IndexWriter, Term,
|
DateTime, IndexReader, IndexWriter, Term,
|
||||||
};
|
};
|
||||||
|
use tokio::pin;
|
||||||
|
|
||||||
pub fn make_search(project: &Project, source: &PesdePackageSource) -> (IndexReader, IndexWriter) {
|
pub async fn all_packages(
|
||||||
|
source: &PesdePackageSource,
|
||||||
|
project: &Project,
|
||||||
|
) -> impl Stream<Item = (PackageName, IndexFile)> {
|
||||||
|
let path = source.path(project);
|
||||||
|
|
||||||
|
stream! {
|
||||||
|
let repo = gix::open(&path).expect("failed to open index");
|
||||||
|
let tree = root_tree(&repo).expect("failed to get root tree");
|
||||||
|
|
||||||
|
for entry in tree.iter() {
|
||||||
|
let entry = entry.expect("failed to read entry");
|
||||||
|
let object = entry.object().expect("failed to get object");
|
||||||
|
|
||||||
|
// directories will be trees, and files will be blobs
|
||||||
|
if !matches!(object.kind, gix::object::Kind::Tree) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let package_scope = entry.filename().to_string();
|
||||||
|
|
||||||
|
for inner_entry in object.into_tree().iter() {
|
||||||
|
let inner_entry = inner_entry.expect("failed to read inner entry");
|
||||||
|
let object = inner_entry.object().expect("failed to get object");
|
||||||
|
|
||||||
|
if !matches!(object.kind, gix::object::Kind::Blob) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let package_name = inner_entry.filename().to_string();
|
||||||
|
|
||||||
|
if package_name == SCOPE_INFO_FILE {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let blob = object.into_blob();
|
||||||
|
let string = String::from_utf8(blob.data.clone()).expect("failed to parse utf8");
|
||||||
|
|
||||||
|
let file: IndexFile = toml::from_str(&string).expect("failed to parse index file");
|
||||||
|
|
||||||
|
// if this panics, it's an issue with the index.
|
||||||
|
let name = format!("{package_scope}/{package_name}").parse().unwrap();
|
||||||
|
|
||||||
|
yield (name, file);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn make_search(
|
||||||
|
project: &Project,
|
||||||
|
source: &PesdePackageSource,
|
||||||
|
) -> (IndexReader, IndexWriter, QueryParser) {
|
||||||
let mut schema_builder = tantivy::schema::SchemaBuilder::new();
|
let mut schema_builder = tantivy::schema::SchemaBuilder::new();
|
||||||
|
|
||||||
let field_options = TextOptions::default().set_indexing_options(
|
let field_options = TextOptions::default().set_indexing_options(
|
||||||
|
@ -28,7 +88,9 @@ pub fn make_search(project: &Project, source: &PesdePackageSource) -> (IndexRead
|
||||||
let search_index = tantivy::Index::create_in_ram(schema_builder.build());
|
let search_index = tantivy::Index::create_in_ram(schema_builder.build());
|
||||||
search_index.tokenizers().register(
|
search_index.tokenizers().register(
|
||||||
"ngram",
|
"ngram",
|
||||||
tantivy::tokenizer::NgramTokenizer::all_ngrams(1, 12).unwrap(),
|
TextAnalyzer::builder(tantivy::tokenizer::NgramTokenizer::all_ngrams(1, 12).unwrap())
|
||||||
|
.filter(tantivy::tokenizer::LowerCaser)
|
||||||
|
.build(),
|
||||||
);
|
);
|
||||||
|
|
||||||
let search_reader = search_index
|
let search_reader = search_index
|
||||||
|
@ -38,9 +100,12 @@ pub fn make_search(project: &Project, source: &PesdePackageSource) -> (IndexRead
|
||||||
.unwrap();
|
.unwrap();
|
||||||
let mut search_writer = search_index.writer(50_000_000).unwrap();
|
let mut search_writer = search_index.writer(50_000_000).unwrap();
|
||||||
|
|
||||||
for (pkg_name, mut file) in source.all_packages(project).unwrap() {
|
let stream = all_packages(source, project).await;
|
||||||
let Some((_, latest_entry)) = file.pop_last() else {
|
pin!(stream);
|
||||||
log::warn!("no versions found for {pkg_name}");
|
|
||||||
|
while let Some((pkg_name, mut file)) = stream.next().await {
|
||||||
|
let Some((_, latest_entry)) = file.entries.pop_last() else {
|
||||||
|
tracing::error!("no versions found for {pkg_name}");
|
||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -56,7 +121,11 @@ pub fn make_search(project: &Project, source: &PesdePackageSource) -> (IndexRead
|
||||||
search_writer.commit().unwrap();
|
search_writer.commit().unwrap();
|
||||||
search_reader.reload().unwrap();
|
search_reader.reload().unwrap();
|
||||||
|
|
||||||
(search_reader, search_writer)
|
let mut query_parser = QueryParser::for_index(&search_index, vec![scope, name, description]);
|
||||||
|
query_parser.set_field_boost(scope, 2.0);
|
||||||
|
query_parser.set_field_boost(name, 3.5);
|
||||||
|
|
||||||
|
(search_reader, search_writer, query_parser)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn update_version(app_state: &AppState, name: &PackageName, entry: IndexFileEntry) {
|
pub fn update_version(app_state: &AppState, name: &PackageName, entry: IndexFileEntry) {
|
||||||
|
|
|
@ -3,14 +3,29 @@ use actix_web::{
|
||||||
http::header::{CONTENT_ENCODING, CONTENT_TYPE},
|
http::header::{CONTENT_ENCODING, CONTENT_TYPE},
|
||||||
HttpResponse,
|
HttpResponse,
|
||||||
};
|
};
|
||||||
|
use fs_err::tokio as fs;
|
||||||
use pesde::{names::PackageName, source::version_id::VersionId};
|
use pesde::{names::PackageName, source::version_id::VersionId};
|
||||||
use std::{fmt::Display, fs::create_dir_all, path::PathBuf};
|
use std::{
|
||||||
|
fmt::Display,
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct FSStorage {
|
pub struct FSStorage {
|
||||||
pub root: PathBuf,
|
pub root: PathBuf,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async fn read_file_to_response(path: &Path, content_type: &str) -> Result<HttpResponse, Error> {
|
||||||
|
Ok(match fs::read(path).await {
|
||||||
|
Ok(contents) => HttpResponse::Ok()
|
||||||
|
.append_header((CONTENT_TYPE, content_type))
|
||||||
|
.append_header((CONTENT_ENCODING, "gzip"))
|
||||||
|
.body(contents),
|
||||||
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => HttpResponse::NotFound().finish(),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
impl StorageImpl for FSStorage {
|
impl StorageImpl for FSStorage {
|
||||||
async fn store_package(
|
async fn store_package(
|
||||||
&self,
|
&self,
|
||||||
|
@ -26,9 +41,9 @@ impl StorageImpl for FSStorage {
|
||||||
.join(name)
|
.join(name)
|
||||||
.join(version.version().to_string())
|
.join(version.version().to_string())
|
||||||
.join(version.target().to_string());
|
.join(version.target().to_string());
|
||||||
create_dir_all(&path)?;
|
fs::create_dir_all(&path).await?;
|
||||||
|
|
||||||
std::fs::write(path.join("pkg.tar.gz"), &contents)?;
|
fs::write(path.join("pkg.tar.gz"), &contents).await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -47,12 +62,7 @@ impl StorageImpl for FSStorage {
|
||||||
.join(version.version().to_string())
|
.join(version.version().to_string())
|
||||||
.join(version.target().to_string());
|
.join(version.target().to_string());
|
||||||
|
|
||||||
let contents = std::fs::read(path.join("pkg.tar.gz"))?;
|
read_file_to_response(&path.join("pkg.tar.gz"), "application/gzip").await
|
||||||
|
|
||||||
Ok(HttpResponse::Ok()
|
|
||||||
.append_header((CONTENT_TYPE, "application/gzip"))
|
|
||||||
.append_header((CONTENT_ENCODING, "gzip"))
|
|
||||||
.body(contents))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn store_readme(
|
async fn store_readme(
|
||||||
|
@ -69,9 +79,9 @@ impl StorageImpl for FSStorage {
|
||||||
.join(name)
|
.join(name)
|
||||||
.join(version.version().to_string())
|
.join(version.version().to_string())
|
||||||
.join(version.target().to_string());
|
.join(version.target().to_string());
|
||||||
create_dir_all(&path)?;
|
fs::create_dir_all(&path).await?;
|
||||||
|
|
||||||
std::fs::write(path.join("readme.gz"), &contents)?;
|
fs::write(path.join("readme.gz"), &contents).await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -90,32 +100,22 @@ impl StorageImpl for FSStorage {
|
||||||
.join(version.version().to_string())
|
.join(version.version().to_string())
|
||||||
.join(version.target().to_string());
|
.join(version.target().to_string());
|
||||||
|
|
||||||
let contents = std::fs::read(path.join("readme.gz"))?;
|
read_file_to_response(&path.join("readme.gz"), "text/plain").await
|
||||||
|
|
||||||
Ok(HttpResponse::Ok()
|
|
||||||
.append_header((CONTENT_TYPE, "text/plain"))
|
|
||||||
.append_header((CONTENT_ENCODING, "gzip"))
|
|
||||||
.body(contents))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
|
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
|
||||||
let path = self.root.join("docs");
|
let path = self.root.join("Doc");
|
||||||
create_dir_all(&path)?;
|
fs::create_dir_all(&path).await?;
|
||||||
|
|
||||||
std::fs::write(path.join(format!("{doc_hash}.gz")), &contents)?;
|
fs::write(path.join(format!("{doc_hash}.gz")), &contents).await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> {
|
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> {
|
||||||
let path = self.root.join("docs");
|
let path = self.root.join("Doc");
|
||||||
|
|
||||||
let contents = std::fs::read(path.join(format!("{doc_hash}.gz")))?;
|
read_file_to_response(&path.join(format!("{doc_hash}.gz")), "text/plain").await
|
||||||
|
|
||||||
Ok(HttpResponse::Ok()
|
|
||||||
.append_header((CONTENT_TYPE, "text/plain"))
|
|
||||||
.append_header((CONTENT_ENCODING, "gzip"))
|
|
||||||
.body(contents))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,7 @@
|
||||||
use crate::{error::Error, storage::StorageImpl};
|
use crate::{
|
||||||
|
error::{Error, ReqwestErrorExt},
|
||||||
|
storage::StorageImpl,
|
||||||
|
};
|
||||||
use actix_web::{http::header::LOCATION, HttpResponse};
|
use actix_web::{http::header::LOCATION, HttpResponse};
|
||||||
use pesde::{names::PackageName, source::version_id::VersionId};
|
use pesde::{names::PackageName, source::version_id::VersionId};
|
||||||
use reqwest::header::{CONTENT_ENCODING, CONTENT_TYPE};
|
use reqwest::header::{CONTENT_ENCODING, CONTENT_TYPE};
|
||||||
|
@ -15,7 +18,7 @@ pub struct S3Storage {
|
||||||
pub reqwest_client: reqwest::Client,
|
pub reqwest_client: reqwest::Client,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub const S3_SIGN_DURATION: Duration = Duration::from_secs(60 * 3);
|
pub const S3_SIGN_DURATION: Duration = Duration::from_secs(60 * 15);
|
||||||
|
|
||||||
impl StorageImpl for S3Storage {
|
impl StorageImpl for S3Storage {
|
||||||
async fn store_package(
|
async fn store_package(
|
||||||
|
@ -41,6 +44,8 @@ impl StorageImpl for S3Storage {
|
||||||
.header(CONTENT_ENCODING, "gzip")
|
.header(CONTENT_ENCODING, "gzip")
|
||||||
.body(contents)
|
.body(contents)
|
||||||
.send()
|
.send()
|
||||||
|
.await?
|
||||||
|
.into_error()
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -90,6 +95,8 @@ impl StorageImpl for S3Storage {
|
||||||
.header(CONTENT_ENCODING, "gzip")
|
.header(CONTENT_ENCODING, "gzip")
|
||||||
.body(contents)
|
.body(contents)
|
||||||
.send()
|
.send()
|
||||||
|
.await?
|
||||||
|
.into_error()
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -120,7 +127,8 @@ impl StorageImpl for S3Storage {
|
||||||
let object_url = PutObject::new(
|
let object_url = PutObject::new(
|
||||||
&self.s3_bucket,
|
&self.s3_bucket,
|
||||||
Some(&self.s3_credentials),
|
Some(&self.s3_credentials),
|
||||||
&format!("doc/{}.gz", doc_hash),
|
// capitalize Doc to prevent conflicts with scope names
|
||||||
|
&format!("Doc/{}.gz", doc_hash),
|
||||||
)
|
)
|
||||||
.sign(S3_SIGN_DURATION);
|
.sign(S3_SIGN_DURATION);
|
||||||
|
|
||||||
|
@ -130,6 +138,8 @@ impl StorageImpl for S3Storage {
|
||||||
.header(CONTENT_ENCODING, "gzip")
|
.header(CONTENT_ENCODING, "gzip")
|
||||||
.body(contents)
|
.body(contents)
|
||||||
.send()
|
.send()
|
||||||
|
.await?
|
||||||
|
.into_error()
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -139,7 +149,7 @@ impl StorageImpl for S3Storage {
|
||||||
let object_url = GetObject::new(
|
let object_url = GetObject::new(
|
||||||
&self.s3_bucket,
|
&self.s3_bucket,
|
||||||
Some(&self.s3_credentials),
|
Some(&self.s3_credentials),
|
||||||
&format!("doc/{}.gz", doc_hash),
|
&format!("Doc/{}.gz", doc_hash),
|
||||||
)
|
)
|
||||||
.sign(S3_SIGN_DURATION);
|
.sign(S3_SIGN_DURATION);
|
||||||
|
|
||||||
|
|
111
src/cli/auth.rs
|
@ -1,23 +1,57 @@
|
||||||
use crate::cli::config::{read_config, write_config};
|
use crate::cli::config::{read_config, write_config};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
|
use gix::bstr::BStr;
|
||||||
use keyring::Entry;
|
use keyring::Entry;
|
||||||
use serde::Deserialize;
|
use reqwest::header::AUTHORIZATION;
|
||||||
|
use serde::{ser::SerializeMap, Deserialize, Serialize};
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
use tracing::instrument;
|
||||||
|
|
||||||
pub fn get_token() -> anyhow::Result<Option<String>> {
|
#[derive(Debug, Clone)]
|
||||||
match std::env::var("PESDE_TOKEN") {
|
pub struct Tokens(pub BTreeMap<gix::Url, String>);
|
||||||
Ok(token) => return Ok(Some(token)),
|
|
||||||
Err(std::env::VarError::NotPresent) => {}
|
impl Serialize for Tokens {
|
||||||
Err(e) => return Err(e.into()),
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: serde::ser::Serializer,
|
||||||
|
{
|
||||||
|
let mut map = serializer.serialize_map(Some(self.0.len()))?;
|
||||||
|
for (k, v) in &self.0 {
|
||||||
|
map.serialize_entry(&k.to_bstring().to_string(), v)?;
|
||||||
|
}
|
||||||
|
map.end()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'de> Deserialize<'de> for Tokens {
|
||||||
|
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||||
|
where
|
||||||
|
D: serde::de::Deserializer<'de>,
|
||||||
|
{
|
||||||
|
Ok(Tokens(
|
||||||
|
BTreeMap::<String, String>::deserialize(deserializer)?
|
||||||
|
.into_iter()
|
||||||
|
.map(|(k, v)| gix::Url::from_bytes(BStr::new(&k)).map(|k| (k, v)))
|
||||||
|
.collect::<Result<_, _>>()
|
||||||
|
.map_err(serde::de::Error::custom)?,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(level = "trace")]
|
||||||
|
pub async fn get_tokens() -> anyhow::Result<Tokens> {
|
||||||
|
let config = read_config().await?;
|
||||||
|
if !config.tokens.0.is_empty() {
|
||||||
|
tracing::debug!("using tokens from config");
|
||||||
|
return Ok(config.tokens);
|
||||||
}
|
}
|
||||||
|
|
||||||
let config = read_config()?;
|
match Entry::new("tokens", env!("CARGO_PKG_NAME")) {
|
||||||
if let Some(token) = config.token {
|
|
||||||
return Ok(Some(token));
|
|
||||||
}
|
|
||||||
|
|
||||||
match Entry::new("token", env!("CARGO_PKG_NAME")) {
|
|
||||||
Ok(entry) => match entry.get_password() {
|
Ok(entry) => match entry.get_password() {
|
||||||
Ok(token) => return Ok(Some(token)),
|
Ok(token) => {
|
||||||
|
tracing::debug!("using tokens from keyring");
|
||||||
|
return serde_json::from_str(&token).context("failed to parse tokens");
|
||||||
|
}
|
||||||
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
|
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
|
||||||
Err(e) => return Err(e.into()),
|
Err(e) => return Err(e.into()),
|
||||||
},
|
},
|
||||||
|
@ -25,32 +59,38 @@ pub fn get_token() -> anyhow::Result<Option<String>> {
|
||||||
Err(e) => return Err(e.into()),
|
Err(e) => return Err(e.into()),
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(None)
|
Ok(Tokens(BTreeMap::new()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn set_token(token: Option<&str>) -> anyhow::Result<()> {
|
#[instrument(level = "trace")]
|
||||||
let entry = match Entry::new("token", env!("CARGO_PKG_NAME")) {
|
pub async fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
|
||||||
Ok(entry) => entry,
|
let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
|
||||||
Err(e) => return Err(e.into()),
|
let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?;
|
||||||
};
|
|
||||||
|
|
||||||
let result = if let Some(token) = token {
|
match entry.set_password(&json) {
|
||||||
entry.set_password(token)
|
Ok(()) => {
|
||||||
} else {
|
tracing::debug!("tokens saved to keyring");
|
||||||
entry.delete_credential()
|
return Ok(());
|
||||||
};
|
}
|
||||||
|
|
||||||
match result {
|
|
||||||
Ok(()) => return Ok(()),
|
|
||||||
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
|
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
|
||||||
Err(e) => return Err(e.into()),
|
Err(e) => return Err(e.into()),
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut config = read_config()?;
|
tracing::debug!("tokens saved to config");
|
||||||
config.token = token.map(|s| s.to_string());
|
|
||||||
write_config(&config)?;
|
|
||||||
|
|
||||||
Ok(())
|
let mut config = read_config().await?;
|
||||||
|
config.tokens = tokens;
|
||||||
|
write_config(&config).await.map_err(Into::into)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn set_token(repo: &gix::Url, token: Option<&str>) -> anyhow::Result<()> {
|
||||||
|
let mut tokens = get_tokens().await?;
|
||||||
|
if let Some(token) = token {
|
||||||
|
tokens.0.insert(repo.clone(), token.to_string());
|
||||||
|
} else {
|
||||||
|
tokens.0.remove(repo);
|
||||||
|
}
|
||||||
|
set_tokens(tokens).await
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
|
@ -58,18 +98,21 @@ struct UserResponse {
|
||||||
login: String,
|
login: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_token_login(
|
#[instrument(level = "trace")]
|
||||||
reqwest: &reqwest::blocking::Client,
|
pub async fn get_token_login(
|
||||||
|
reqwest: &reqwest::Client,
|
||||||
access_token: &str,
|
access_token: &str,
|
||||||
) -> anyhow::Result<String> {
|
) -> anyhow::Result<String> {
|
||||||
let response = reqwest
|
let response = reqwest
|
||||||
.get("https://api.github.com/user")
|
.get("https://api.github.com/user")
|
||||||
.header("Authorization", access_token)
|
.header(AUTHORIZATION, access_token)
|
||||||
.send()
|
.send()
|
||||||
|
.await
|
||||||
.context("failed to send user request")?
|
.context("failed to send user request")?
|
||||||
.error_for_status()
|
.error_for_status()
|
||||||
.context("failed to get user")?
|
.context("failed to get user")?
|
||||||
.json::<UserResponse>()
|
.json::<UserResponse>()
|
||||||
|
.await
|
||||||
.context("failed to parse user response")?;
|
.context("failed to parse user response")?;
|
||||||
|
|
||||||
Ok(response.login)
|
Ok(response.login)
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
use std::str::FromStr;
|
use std::{collections::HashSet, str::FromStr};
|
||||||
|
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
|
use colored::Colorize;
|
||||||
use semver::VersionReq;
|
use semver::VersionReq;
|
||||||
|
|
||||||
use crate::cli::{config::read_config, AnyPackageIdentifier, VersionedPackageName};
|
use crate::cli::{config::read_config, AnyPackageIdentifier, VersionedPackageName};
|
||||||
|
@ -47,9 +48,10 @@ pub struct AddCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AddCommand {
|
impl AddCommand {
|
||||||
pub fn run(self, project: Project) -> anyhow::Result<()> {
|
pub async fn run(self, project: Project) -> anyhow::Result<()> {
|
||||||
let manifest = project
|
let manifest = project
|
||||||
.deser_manifest()
|
.deser_manifest()
|
||||||
|
.await
|
||||||
.context("failed to read manifest")?;
|
.context("failed to read manifest")?;
|
||||||
|
|
||||||
let (source, specifier) = match &self.name {
|
let (source, specifier) = match &self.name {
|
||||||
|
@ -61,11 +63,14 @@ impl AddCommand {
|
||||||
.cloned();
|
.cloned();
|
||||||
|
|
||||||
if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
|
if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
|
||||||
log::error!("index {index} not found");
|
println!("{}: index {index} not found", "error".red().bold());
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
let index = index.unwrap_or(read_config()?.default_index);
|
let index = match index {
|
||||||
|
Some(index) => index,
|
||||||
|
None => read_config().await?.default_index,
|
||||||
|
};
|
||||||
|
|
||||||
let source = PackageSources::Pesde(PesdePackageSource::new(index));
|
let source = PackageSources::Pesde(PesdePackageSource::new(index));
|
||||||
let specifier = DependencySpecifiers::Pesde(PesdeDependencySpecifier {
|
let specifier = DependencySpecifiers::Pesde(PesdeDependencySpecifier {
|
||||||
|
@ -85,11 +90,11 @@ impl AddCommand {
|
||||||
.cloned();
|
.cloned();
|
||||||
|
|
||||||
if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
|
if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
|
||||||
log::error!("wally index {index} not found");
|
println!("{}: wally index {index} not found", "error".red().bold());
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
let index = index.unwrap_or(read_config()?.default_index);
|
let index = index.context("no wally index found")?;
|
||||||
|
|
||||||
let source =
|
let source =
|
||||||
PackageSources::Wally(pesde::source::wally::WallyPackageSource::new(index));
|
PackageSources::Wally(pesde::source::wally::WallyPackageSource::new(index));
|
||||||
|
@ -117,7 +122,7 @@ impl AddCommand {
|
||||||
DependencySpecifiers::Workspace(
|
DependencySpecifiers::Workspace(
|
||||||
pesde::source::workspace::specifier::WorkspaceDependencySpecifier {
|
pesde::source::workspace::specifier::WorkspaceDependencySpecifier {
|
||||||
name: name.clone(),
|
name: name.clone(),
|
||||||
version_type: version.unwrap_or_default(),
|
version: version.clone().unwrap_or_default(),
|
||||||
target: self.target,
|
target: self.target,
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
|
@ -125,23 +130,33 @@ impl AddCommand {
|
||||||
};
|
};
|
||||||
source
|
source
|
||||||
.refresh(&project)
|
.refresh(&project)
|
||||||
|
.await
|
||||||
.context("failed to refresh package source")?;
|
.context("failed to refresh package source")?;
|
||||||
|
|
||||||
let Some(version_id) = source
|
let Some(version_id) = source
|
||||||
.resolve(&specifier, &project, manifest.target.kind())
|
.resolve(
|
||||||
|
&specifier,
|
||||||
|
&project,
|
||||||
|
manifest.target.kind(),
|
||||||
|
&mut HashSet::new(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
.context("failed to resolve package")?
|
.context("failed to resolve package")?
|
||||||
.1
|
.1
|
||||||
.pop_last()
|
.pop_last()
|
||||||
.map(|(v_id, _)| v_id)
|
.map(|(v_id, _)| v_id)
|
||||||
else {
|
else {
|
||||||
log::error!("no versions found for package {specifier}");
|
println!("{}: no versions found for package", "error".red().bold());
|
||||||
|
|
||||||
return Ok(());
|
return Ok(());
|
||||||
};
|
};
|
||||||
|
|
||||||
let project_target = manifest.target.kind();
|
let project_target = manifest.target.kind();
|
||||||
let mut manifest = toml_edit::DocumentMut::from_str(
|
let mut manifest = toml_edit::DocumentMut::from_str(
|
||||||
&project.read_manifest().context("failed to read manifest")?,
|
&project
|
||||||
|
.read_manifest()
|
||||||
|
.await
|
||||||
|
.context("failed to read manifest")?,
|
||||||
)
|
)
|
||||||
.context("failed to parse manifest")?;
|
.context("failed to parse manifest")?;
|
||||||
let dependency_key = if self.peer {
|
let dependency_key = if self.peer {
|
||||||
|
@ -220,13 +235,14 @@ impl AddCommand {
|
||||||
|
|
||||||
println!(
|
println!(
|
||||||
"added workspace {}@{} to {}",
|
"added workspace {}@{} to {}",
|
||||||
spec.name, spec.version_type, dependency_key
|
spec.name, spec.version, dependency_key
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
project
|
project
|
||||||
.write_manifest(manifest.to_string())
|
.write_manifest(manifest.to_string())
|
||||||
|
.await
|
||||||
.context("failed to write manifest")?;
|
.context("failed to write manifest")?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
|
@ -2,27 +2,21 @@ use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
|
use std::thread::spawn;
|
||||||
|
use tokio::time::sleep;
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
|
||||||
use pesde::{
|
use pesde::{
|
||||||
errors::ManifestReadError,
|
|
||||||
source::{pesde::PesdePackageSource, traits::PackageSource},
|
source::{pesde::PesdePackageSource, traits::PackageSource},
|
||||||
Project,
|
Project,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::cli::{
|
use crate::cli::auth::{get_token_login, set_token};
|
||||||
auth::{get_token_login, set_token},
|
|
||||||
config::read_config,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct LoginCommand {
|
pub struct LoginCommand {
|
||||||
/// The index to use. Defaults to `default`, or the configured default index if current directory doesn't have a manifest
|
|
||||||
#[arg(short, long)]
|
|
||||||
index: Option<String>,
|
|
||||||
|
|
||||||
/// The token to use for authentication, skipping login
|
/// The token to use for authentication, skipping login
|
||||||
#[arg(short, long, conflicts_with = "index")]
|
#[arg(short, long)]
|
||||||
token: Option<String>,
|
token: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -53,49 +47,27 @@ enum AccessTokenResponse {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl LoginCommand {
|
impl LoginCommand {
|
||||||
pub fn authenticate_device_flow(
|
pub async fn authenticate_device_flow(
|
||||||
&self,
|
&self,
|
||||||
|
index_url: &gix::Url,
|
||||||
project: &Project,
|
project: &Project,
|
||||||
reqwest: &reqwest::blocking::Client,
|
reqwest: &reqwest::Client,
|
||||||
) -> anyhow::Result<String> {
|
) -> anyhow::Result<String> {
|
||||||
let manifest = match project.deser_manifest() {
|
println!("logging in into {index_url}");
|
||||||
Ok(manifest) => Some(manifest),
|
|
||||||
Err(e) => match e {
|
|
||||||
ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None,
|
|
||||||
e => return Err(e.into()),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
let index_url = match self.index.as_deref() {
|
let source = PesdePackageSource::new(index_url.clone());
|
||||||
Some(index) => match index.try_into() {
|
source
|
||||||
Ok(url) => Some(url),
|
.refresh(project)
|
||||||
Err(_) => None,
|
.await
|
||||||
},
|
.context("failed to refresh index")?;
|
||||||
None => match manifest {
|
|
||||||
Some(_) => None,
|
|
||||||
None => Some(read_config()?.default_index),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
let index_url = match index_url {
|
|
||||||
Some(url) => url,
|
|
||||||
None => {
|
|
||||||
let index_name = self.index.as_deref().unwrap_or("default");
|
|
||||||
|
|
||||||
match manifest.unwrap().indices.get(index_name) {
|
|
||||||
Some(index) => index.clone(),
|
|
||||||
None => anyhow::bail!("Index {index_name} not found"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let source = PesdePackageSource::new(index_url);
|
|
||||||
source.refresh(project).context("failed to refresh index")?;
|
|
||||||
|
|
||||||
let config = source
|
let config = source
|
||||||
.config(project)
|
.config(project)
|
||||||
|
.await
|
||||||
.context("failed to read index config")?;
|
.context("failed to read index config")?;
|
||||||
let client_id = config.github_oauth_client_id;
|
let Some(client_id) = config.github_oauth_client_id else {
|
||||||
|
anyhow::bail!("index not configured for Github oauth.");
|
||||||
|
};
|
||||||
|
|
||||||
let response = reqwest
|
let response = reqwest
|
||||||
.post(Url::parse_with_params(
|
.post(Url::parse_with_params(
|
||||||
|
@ -103,10 +75,12 @@ impl LoginCommand {
|
||||||
&[("client_id", &client_id)],
|
&[("client_id", &client_id)],
|
||||||
)?)
|
)?)
|
||||||
.send()
|
.send()
|
||||||
|
.await
|
||||||
.context("failed to send device code request")?
|
.context("failed to send device code request")?
|
||||||
.error_for_status()
|
.error_for_status()
|
||||||
.context("failed to get device code response")?
|
.context("failed to get device code response")?
|
||||||
.json::<DeviceCodeResponse>()
|
.json::<DeviceCodeResponse>()
|
||||||
|
.await
|
||||||
.context("failed to parse device code response")?;
|
.context("failed to parse device code response")?;
|
||||||
|
|
||||||
println!(
|
println!(
|
||||||
|
@ -115,25 +89,27 @@ impl LoginCommand {
|
||||||
response.verification_uri.as_str().blue()
|
response.verification_uri.as_str().blue()
|
||||||
);
|
);
|
||||||
|
|
||||||
{
|
spawn(move || {
|
||||||
let mut input = String::new();
|
{
|
||||||
std::io::stdin()
|
let mut input = String::new();
|
||||||
.read_line(&mut input)
|
std::io::stdin()
|
||||||
.context("failed to read input")?;
|
.read_line(&mut input)
|
||||||
}
|
.expect("failed to read input");
|
||||||
|
|
||||||
match open::that(response.verification_uri.as_str()) {
|
|
||||||
Ok(_) => (),
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("failed to open browser: {e}");
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
match open::that(response.verification_uri.as_str()) {
|
||||||
|
Ok(_) => (),
|
||||||
|
Err(e) => {
|
||||||
|
eprintln!("failed to open browser: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
let mut time_left = response.expires_in;
|
let mut time_left = response.expires_in;
|
||||||
let mut interval = std::time::Duration::from_secs(response.interval);
|
let mut interval = std::time::Duration::from_secs(response.interval);
|
||||||
|
|
||||||
while time_left > 0 {
|
while time_left > 0 {
|
||||||
std::thread::sleep(interval);
|
sleep(interval).await;
|
||||||
time_left = time_left.saturating_sub(interval.as_secs());
|
time_left = time_left.saturating_sub(interval.as_secs());
|
||||||
|
|
||||||
let response = reqwest
|
let response = reqwest
|
||||||
|
@ -149,10 +125,12 @@ impl LoginCommand {
|
||||||
],
|
],
|
||||||
)?)
|
)?)
|
||||||
.send()
|
.send()
|
||||||
|
.await
|
||||||
.context("failed to send access token request")?
|
.context("failed to send access token request")?
|
||||||
.error_for_status()
|
.error_for_status()
|
||||||
.context("failed to get access token response")?
|
.context("failed to get access token response")?
|
||||||
.json::<AccessTokenResponse>()
|
.json::<AccessTokenResponse>()
|
||||||
|
.await
|
||||||
.context("failed to parse access token response")?;
|
.context("failed to parse access token response")?;
|
||||||
|
|
||||||
match response {
|
match response {
|
||||||
|
@ -180,24 +158,35 @@ impl LoginCommand {
|
||||||
anyhow::bail!("code expired, please re-run the login command");
|
anyhow::bail!("code expired, please re-run the login command");
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
|
pub async fn run(
|
||||||
|
self,
|
||||||
|
index_url: gix::Url,
|
||||||
|
project: Project,
|
||||||
|
reqwest: reqwest::Client,
|
||||||
|
) -> anyhow::Result<()> {
|
||||||
let token_given = self.token.is_some();
|
let token_given = self.token.is_some();
|
||||||
let token = match self.token {
|
let token = match self.token {
|
||||||
Some(token) => token,
|
Some(token) => token,
|
||||||
None => self.authenticate_device_flow(&project, &reqwest)?,
|
None => {
|
||||||
|
self.authenticate_device_flow(&index_url, &project, &reqwest)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let token = if token_given {
|
let token = if token_given {
|
||||||
println!("set token");
|
println!("set token for {index_url}");
|
||||||
token
|
token
|
||||||
} else {
|
} else {
|
||||||
let token = format!("Bearer {token}");
|
let token = format!("Bearer {token}");
|
||||||
println!("logged in as {}", get_token_login(&reqwest, &token)?.bold());
|
println!(
|
||||||
|
"logged in as {} for {index_url}",
|
||||||
|
get_token_login(&reqwest, &token).await?.bold()
|
||||||
|
);
|
||||||
|
|
||||||
token
|
token
|
||||||
};
|
};
|
||||||
|
|
||||||
set_token(Some(&token))?;
|
set_token(&index_url, Some(&token)).await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,10 +5,10 @@ use clap::Args;
|
||||||
pub struct LogoutCommand {}
|
pub struct LogoutCommand {}
|
||||||
|
|
||||||
impl LogoutCommand {
|
impl LogoutCommand {
|
||||||
pub fn run(self) -> anyhow::Result<()> {
|
pub async fn run(self, index_url: gix::Url) -> anyhow::Result<()> {
|
||||||
set_token(None)?;
|
set_token(&index_url, None).await?;
|
||||||
|
|
||||||
println!("logged out");
|
println!("logged out of {index_url}");
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,31 +1,73 @@
|
||||||
use clap::Subcommand;
|
use crate::cli::config::read_config;
|
||||||
use pesde::Project;
|
use clap::{Args, Subcommand};
|
||||||
|
use pesde::{errors::ManifestReadError, Project, DEFAULT_INDEX_NAME};
|
||||||
|
|
||||||
mod login;
|
mod login;
|
||||||
mod logout;
|
mod logout;
|
||||||
mod set_token_override;
|
mod token;
|
||||||
mod whoami;
|
mod whoami;
|
||||||
|
|
||||||
|
#[derive(Debug, Args)]
|
||||||
|
pub struct AuthSubcommand {
|
||||||
|
/// The index to use. Defaults to `default`, or the configured default index if current directory doesn't have a manifest
|
||||||
|
#[arg(short, long)]
|
||||||
|
pub index: Option<String>,
|
||||||
|
|
||||||
|
#[clap(subcommand)]
|
||||||
|
pub command: AuthCommands,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Subcommand)]
|
#[derive(Debug, Subcommand)]
|
||||||
pub enum AuthCommands {
|
pub enum AuthCommands {
|
||||||
/// Logs in into GitHub, and stores the token
|
/// Sets a token for an index. Optionally gets it from GitHub
|
||||||
Login(login::LoginCommand),
|
Login(login::LoginCommand),
|
||||||
/// Removes the stored token
|
/// Removes the stored token
|
||||||
Logout(logout::LogoutCommand),
|
Logout(logout::LogoutCommand),
|
||||||
/// Prints the username of the currently logged-in user
|
/// Prints the username of the currently logged-in user
|
||||||
#[clap(name = "whoami")]
|
#[clap(name = "whoami")]
|
||||||
WhoAmI(whoami::WhoAmICommand),
|
WhoAmI(whoami::WhoAmICommand),
|
||||||
/// Sets a token override for a specific repository
|
/// Prints the token for an index
|
||||||
SetTokenOverride(set_token_override::SetTokenOverrideCommand),
|
Token(token::TokenCommand),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AuthCommands {
|
impl AuthSubcommand {
|
||||||
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
|
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||||
match self {
|
let manifest = match project.deser_manifest().await {
|
||||||
AuthCommands::Login(login) => login.run(project, reqwest),
|
Ok(manifest) => Some(manifest),
|
||||||
AuthCommands::Logout(logout) => logout.run(),
|
Err(e) => match e {
|
||||||
AuthCommands::WhoAmI(whoami) => whoami.run(reqwest),
|
ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None,
|
||||||
AuthCommands::SetTokenOverride(set_token_override) => set_token_override.run(),
|
e => return Err(e.into()),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
let index_url = match self.index.as_deref() {
|
||||||
|
Some(index) => match index.try_into() {
|
||||||
|
Ok(url) => Some(url),
|
||||||
|
Err(_) => None,
|
||||||
|
},
|
||||||
|
None => match manifest {
|
||||||
|
Some(_) => None,
|
||||||
|
None => Some(read_config().await?.default_index),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
let index_url = match index_url {
|
||||||
|
Some(url) => url,
|
||||||
|
None => {
|
||||||
|
let index_name = self.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
|
||||||
|
|
||||||
|
match manifest.unwrap().indices.get(index_name) {
|
||||||
|
Some(index) => index.clone(),
|
||||||
|
None => anyhow::bail!("index {index_name} not found in manifest"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
match self.command {
|
||||||
|
AuthCommands::Login(login) => login.run(index_url, project, reqwest).await,
|
||||||
|
AuthCommands::Logout(logout) => logout.run(index_url).await,
|
||||||
|
AuthCommands::WhoAmI(whoami) => whoami.run(index_url, reqwest).await,
|
||||||
|
AuthCommands::Token(token) => token.run(index_url).await,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,31 +0,0 @@
|
||||||
use crate::cli::config::{read_config, write_config};
|
|
||||||
use clap::Args;
|
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
|
||||||
pub struct SetTokenOverrideCommand {
|
|
||||||
/// The repository to add the token to
|
|
||||||
#[arg(index = 1, value_parser = crate::cli::parse_gix_url)]
|
|
||||||
repository: gix::Url,
|
|
||||||
|
|
||||||
/// The token to set
|
|
||||||
#[arg(index = 2)]
|
|
||||||
token: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SetTokenOverrideCommand {
|
|
||||||
pub fn run(self) -> anyhow::Result<()> {
|
|
||||||
let mut config = read_config()?;
|
|
||||||
|
|
||||||
if let Some(token) = self.token {
|
|
||||||
println!("set token for {}", self.repository);
|
|
||||||
config.token_overrides.insert(self.repository, token);
|
|
||||||
} else {
|
|
||||||
println!("removed token for {}", self.repository);
|
|
||||||
config.token_overrides.remove(&self.repository);
|
|
||||||
}
|
|
||||||
|
|
||||||
write_config(&config)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
22
src/cli/commands/auth/token.rs
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
use crate::cli::auth::get_tokens;
|
||||||
|
use clap::Args;
|
||||||
|
|
||||||
|
#[derive(Debug, Args)]
|
||||||
|
pub struct TokenCommand {}
|
||||||
|
|
||||||
|
impl TokenCommand {
|
||||||
|
pub async fn run(self, index_url: gix::Url) -> anyhow::Result<()> {
|
||||||
|
let tokens = get_tokens().await?;
|
||||||
|
let token = match tokens.0.get(&index_url) {
|
||||||
|
Some(token) => token,
|
||||||
|
None => {
|
||||||
|
println!("not logged in into {index_url}");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
println!("token for {index_url}: \"{token}\"");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,4 +1,4 @@
|
||||||
use crate::cli::{auth::get_token_login, get_token};
|
use crate::cli::auth::{get_token_login, get_tokens};
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
|
|
||||||
|
@ -6,16 +6,20 @@ use colored::Colorize;
|
||||||
pub struct WhoAmICommand {}
|
pub struct WhoAmICommand {}
|
||||||
|
|
||||||
impl WhoAmICommand {
|
impl WhoAmICommand {
|
||||||
pub fn run(self, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
|
pub async fn run(self, index_url: gix::Url, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||||
let token = match get_token()? {
|
let tokens = get_tokens().await?;
|
||||||
|
let token = match tokens.0.get(&index_url) {
|
||||||
Some(token) => token,
|
Some(token) => token,
|
||||||
None => {
|
None => {
|
||||||
println!("not logged in");
|
println!("not logged in into {index_url}");
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
println!("logged in as {}", get_token_login(&reqwest, &token)?.bold());
|
println!(
|
||||||
|
"logged in as {} into {index_url}",
|
||||||
|
get_token_login(&reqwest, token).await?.bold()
|
||||||
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
|
@ -13,8 +13,8 @@ pub struct DefaultIndexCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DefaultIndexCommand {
|
impl DefaultIndexCommand {
|
||||||
pub fn run(self) -> anyhow::Result<()> {
|
pub async fn run(self) -> anyhow::Result<()> {
|
||||||
let mut config = read_config()?;
|
let mut config = read_config().await?;
|
||||||
|
|
||||||
let index = if self.reset {
|
let index = if self.reset {
|
||||||
Some(CliConfig::default().default_index)
|
Some(CliConfig::default().default_index)
|
||||||
|
@ -25,7 +25,7 @@ impl DefaultIndexCommand {
|
||||||
match index {
|
match index {
|
||||||
Some(index) => {
|
Some(index) => {
|
||||||
config.default_index = index.clone();
|
config.default_index = index.clone();
|
||||||
write_config(&config)?;
|
write_config(&config).await?;
|
||||||
println!("default index set to: {index}");
|
println!("default index set to: {index}");
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
|
|
|
@ -1,22 +1,17 @@
|
||||||
use clap::Subcommand;
|
use clap::Subcommand;
|
||||||
|
|
||||||
mod default_index;
|
mod default_index;
|
||||||
mod scripts_repo;
|
|
||||||
|
|
||||||
#[derive(Debug, Subcommand)]
|
#[derive(Debug, Subcommand)]
|
||||||
pub enum ConfigCommands {
|
pub enum ConfigCommands {
|
||||||
/// Configuration for the default index
|
/// Configuration for the default index
|
||||||
DefaultIndex(default_index::DefaultIndexCommand),
|
DefaultIndex(default_index::DefaultIndexCommand),
|
||||||
|
|
||||||
/// Configuration for the scripts repository
|
|
||||||
ScriptsRepo(scripts_repo::ScriptsRepoCommand),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ConfigCommands {
|
impl ConfigCommands {
|
||||||
pub fn run(self) -> anyhow::Result<()> {
|
pub async fn run(self) -> anyhow::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
ConfigCommands::DefaultIndex(default_index) => default_index.run(),
|
ConfigCommands::DefaultIndex(default_index) => default_index.run().await,
|
||||||
ConfigCommands::ScriptsRepo(scripts_repo) => scripts_repo.run(),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,46 +0,0 @@
|
||||||
use crate::cli::{
|
|
||||||
config::{read_config, write_config, CliConfig},
|
|
||||||
home_dir,
|
|
||||||
};
|
|
||||||
use anyhow::Context;
|
|
||||||
use clap::Args;
|
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
|
||||||
pub struct ScriptsRepoCommand {
|
|
||||||
/// The new repo URL to set as default, don't pass any value to check the current default repo
|
|
||||||
#[arg(index = 1, value_parser = crate::cli::parse_gix_url)]
|
|
||||||
repo: Option<gix::Url>,
|
|
||||||
|
|
||||||
/// Resets the default repo to the default value
|
|
||||||
#[arg(short, long, conflicts_with = "repo")]
|
|
||||||
reset: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ScriptsRepoCommand {
|
|
||||||
pub fn run(self) -> anyhow::Result<()> {
|
|
||||||
let mut config = read_config()?;
|
|
||||||
|
|
||||||
let repo = if self.reset {
|
|
||||||
Some(CliConfig::default().scripts_repo)
|
|
||||||
} else {
|
|
||||||
self.repo
|
|
||||||
};
|
|
||||||
|
|
||||||
match repo {
|
|
||||||
Some(repo) => {
|
|
||||||
config.scripts_repo = repo.clone();
|
|
||||||
write_config(&config)?;
|
|
||||||
|
|
||||||
std::fs::remove_dir_all(home_dir()?.join("scripts"))
|
|
||||||
.context("failed to remove scripts directory")?;
|
|
||||||
|
|
||||||
println!("scripts repo set to: {repo}");
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
println!("current scripts repo: {}", config.scripts_repo);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,6 +1,7 @@
|
||||||
use crate::cli::{config::read_config, VersionedPackageName};
|
use crate::cli::{config::read_config, progress_bar, VersionedPackageName};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
|
use fs_err::tokio as fs;
|
||||||
use pesde::{
|
use pesde::{
|
||||||
linking::generator::generate_bin_linking_module,
|
linking::generator::generate_bin_linking_module,
|
||||||
manifest::target::TargetKind,
|
manifest::target::TargetKind,
|
||||||
|
@ -12,7 +13,10 @@ use pesde::{
|
||||||
Project,
|
Project,
|
||||||
};
|
};
|
||||||
use semver::VersionReq;
|
use semver::VersionReq;
|
||||||
use std::{env::current_dir, ffi::OsString, io::Write, process::Command};
|
use std::{
|
||||||
|
collections::HashSet, env::current_dir, ffi::OsString, io::Write, process::Command, sync::Arc,
|
||||||
|
};
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct ExecuteCommand {
|
pub struct ExecuteCommand {
|
||||||
|
@ -30,14 +34,16 @@ pub struct ExecuteCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ExecuteCommand {
|
impl ExecuteCommand {
|
||||||
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
|
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||||
let index = self
|
let index = match self.index {
|
||||||
.index
|
Some(index) => Some(index),
|
||||||
.or_else(|| read_config().ok().map(|c| c.default_index))
|
None => read_config().await.ok().map(|c| c.default_index),
|
||||||
.context("no index specified")?;
|
}
|
||||||
|
.context("no index specified")?;
|
||||||
let source = PesdePackageSource::new(index);
|
let source = PesdePackageSource::new(index);
|
||||||
source
|
source
|
||||||
.refresh(&project)
|
.refresh(&project)
|
||||||
|
.await
|
||||||
.context("failed to refresh source")?;
|
.context("failed to refresh source")?;
|
||||||
|
|
||||||
let version_req = self.package.1.unwrap_or(VersionReq::STAR);
|
let version_req = self.package.1.unwrap_or(VersionReq::STAR);
|
||||||
|
@ -50,7 +56,8 @@ impl ExecuteCommand {
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(res) = source
|
if let Some(res) = source
|
||||||
.resolve(&specifier, &project, TargetKind::Lune)
|
.resolve(&specifier, &project, TargetKind::Lune, &mut HashSet::new())
|
||||||
|
.await
|
||||||
.context("failed to resolve package")?
|
.context("failed to resolve package")?
|
||||||
.1
|
.1
|
||||||
.pop_last()
|
.pop_last()
|
||||||
|
@ -59,7 +66,8 @@ impl ExecuteCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
source
|
source
|
||||||
.resolve(&specifier, &project, TargetKind::Luau)
|
.resolve(&specifier, &project, TargetKind::Luau, &mut HashSet::new())
|
||||||
|
.await
|
||||||
.context("failed to resolve package")?
|
.context("failed to resolve package")?
|
||||||
.1
|
.1
|
||||||
.pop_last()
|
.pop_last()
|
||||||
|
@ -70,22 +78,66 @@ impl ExecuteCommand {
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
log::info!("found package {}@{version}", pkg_ref.name);
|
println!("using {}@{version}", pkg_ref.name);
|
||||||
|
|
||||||
let (fs, target) = source
|
|
||||||
.download(&pkg_ref, &project, &reqwest)
|
|
||||||
.context("failed to download package")?;
|
|
||||||
let bin_path = target.bin_path().context("package has no binary export")?;
|
|
||||||
|
|
||||||
let tmp_dir = project.cas_dir().join(".tmp");
|
let tmp_dir = project.cas_dir().join(".tmp");
|
||||||
std::fs::create_dir_all(&tmp_dir).context("failed to create temporary directory")?;
|
fs::create_dir_all(&tmp_dir)
|
||||||
|
.await
|
||||||
|
.context("failed to create temporary directory")?;
|
||||||
let tempdir =
|
let tempdir =
|
||||||
tempfile::tempdir_in(tmp_dir).context("failed to create temporary directory")?;
|
tempfile::tempdir_in(tmp_dir).context("failed to create temporary directory")?;
|
||||||
|
|
||||||
|
let project = Project::new(
|
||||||
|
tempdir.path(),
|
||||||
|
None::<std::path::PathBuf>,
|
||||||
|
project.data_dir(),
|
||||||
|
project.cas_dir(),
|
||||||
|
project.auth_config().clone(),
|
||||||
|
);
|
||||||
|
|
||||||
|
let (fs, target) = source
|
||||||
|
.download(&pkg_ref, &project, &reqwest)
|
||||||
|
.await
|
||||||
|
.context("failed to download package")?;
|
||||||
|
let bin_path = target.bin_path().context("package has no binary export")?;
|
||||||
|
|
||||||
fs.write_to(tempdir.path(), project.cas_dir(), true)
|
fs.write_to(tempdir.path(), project.cas_dir(), true)
|
||||||
|
.await
|
||||||
.context("failed to write package contents")?;
|
.context("failed to write package contents")?;
|
||||||
|
|
||||||
|
let mut refreshed_sources = HashSet::new();
|
||||||
|
|
||||||
|
let graph = project
|
||||||
|
.dependency_graph(None, &mut refreshed_sources, true)
|
||||||
|
.await
|
||||||
|
.context("failed to build dependency graph")?;
|
||||||
|
let graph = Arc::new(graph);
|
||||||
|
|
||||||
|
let (rx, downloaded_graph) = project
|
||||||
|
.download_and_link(
|
||||||
|
&graph,
|
||||||
|
&Arc::new(Mutex::new(refreshed_sources)),
|
||||||
|
&reqwest,
|
||||||
|
true,
|
||||||
|
true,
|
||||||
|
|_| async { Ok::<_, std::io::Error>(()) },
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to download dependencies")?;
|
||||||
|
|
||||||
|
progress_bar(
|
||||||
|
graph.values().map(|versions| versions.len() as u64).sum(),
|
||||||
|
rx,
|
||||||
|
"📥 ".to_string(),
|
||||||
|
"downloading dependencies".to_string(),
|
||||||
|
"downloaded dependencies".to_string(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
downloaded_graph
|
||||||
|
.await
|
||||||
|
.context("failed to download & link dependencies")?;
|
||||||
|
|
||||||
let mut caller =
|
let mut caller =
|
||||||
tempfile::NamedTempFile::new_in(tempdir.path()).context("failed to create tempfile")?;
|
tempfile::NamedTempFile::new_in(tempdir.path()).context("failed to create tempfile")?;
|
||||||
caller
|
caller
|
||||||
|
|
|
@ -1,32 +1,44 @@
|
||||||
use std::{path::Path, str::FromStr};
|
use crate::cli::config::read_config;
|
||||||
|
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
use inquire::validator::Validation;
|
use inquire::validator::Validation;
|
||||||
|
|
||||||
use pesde::{
|
use pesde::{
|
||||||
errors::ManifestReadError, names::PackageName, scripts::ScriptName, Project, DEFAULT_INDEX_NAME,
|
errors::ManifestReadError,
|
||||||
|
manifest::{target::TargetKind, DependencyType},
|
||||||
|
names::PackageName,
|
||||||
|
source::{
|
||||||
|
git_index::GitBasedSource,
|
||||||
|
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
||||||
|
specifiers::DependencySpecifiers,
|
||||||
|
traits::PackageSource,
|
||||||
|
},
|
||||||
|
Project, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER,
|
||||||
};
|
};
|
||||||
|
use semver::VersionReq;
|
||||||
use crate::cli::{config::read_config, HOME_DIR};
|
use std::{collections::HashSet, fmt::Display, str::FromStr};
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct InitCommand {}
|
pub struct InitCommand {}
|
||||||
|
|
||||||
fn script_contents(path: &Path) -> String {
|
#[derive(Debug)]
|
||||||
format!(
|
enum PackageNameOrCustom {
|
||||||
r#"local process = require("@lune/process")
|
PackageName(PackageName),
|
||||||
local home_dir = if process.os == "windows" then process.env.userprofile else process.env.HOME
|
Custom,
|
||||||
|
}
|
||||||
|
|
||||||
require(home_dir .. {:?})"#,
|
impl Display for PackageNameOrCustom {
|
||||||
format!("/{HOME_DIR}/scripts/{}", path.display())
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
)
|
match self {
|
||||||
|
PackageNameOrCustom::PackageName(n) => write!(f, "{n}"),
|
||||||
|
PackageNameOrCustom::Custom => write!(f, "custom"),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl InitCommand {
|
impl InitCommand {
|
||||||
pub fn run(self, project: Project) -> anyhow::Result<()> {
|
pub async fn run(self, project: Project) -> anyhow::Result<()> {
|
||||||
match project.read_manifest() {
|
match project.read_manifest().await {
|
||||||
Ok(_) => {
|
Ok(_) => {
|
||||||
println!("{}", "project already initialized".red());
|
println!("{}", "project already initialized".red());
|
||||||
return Ok(());
|
return Ok(());
|
||||||
|
@ -38,7 +50,7 @@ impl InitCommand {
|
||||||
let mut manifest = toml_edit::DocumentMut::new();
|
let mut manifest = toml_edit::DocumentMut::new();
|
||||||
|
|
||||||
manifest["name"] = toml_edit::value(
|
manifest["name"] = toml_edit::value(
|
||||||
inquire::Text::new("What is the name of the project?")
|
inquire::Text::new("what is the name of the project?")
|
||||||
.with_validator(|name: &str| {
|
.with_validator(|name: &str| {
|
||||||
Ok(match PackageName::from_str(name) {
|
Ok(match PackageName::from_str(name) {
|
||||||
Ok(_) => Validation::Valid,
|
Ok(_) => Validation::Valid,
|
||||||
|
@ -50,20 +62,19 @@ impl InitCommand {
|
||||||
);
|
);
|
||||||
manifest["version"] = toml_edit::value("0.1.0");
|
manifest["version"] = toml_edit::value("0.1.0");
|
||||||
|
|
||||||
let description =
|
let description = inquire::Text::new("what is the description of the project?")
|
||||||
inquire::Text::new("What is the description of the project? (leave empty for none)")
|
.with_help_message("a short description of the project. leave empty for none")
|
||||||
.prompt()
|
.prompt()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
if !description.is_empty() {
|
if !description.is_empty() {
|
||||||
manifest["description"] = toml_edit::value(description);
|
manifest["description"] = toml_edit::value(description);
|
||||||
}
|
}
|
||||||
|
|
||||||
let authors = inquire::Text::new(
|
let authors = inquire::Text::new("who are the authors of this project?")
|
||||||
"Who are the authors of this project? (leave empty for none, comma separated)",
|
.with_help_message("comma separated list. leave empty for none")
|
||||||
)
|
.prompt()
|
||||||
.prompt()
|
.unwrap();
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let authors = authors
|
let authors = authors
|
||||||
.split(',')
|
.split(',')
|
||||||
|
@ -75,102 +86,177 @@ impl InitCommand {
|
||||||
manifest["authors"] = toml_edit::value(authors);
|
manifest["authors"] = toml_edit::value(authors);
|
||||||
}
|
}
|
||||||
|
|
||||||
let repo = inquire::Text::new(
|
let repo = inquire::Text::new("what is the repository URL of this project?")
|
||||||
"What is the repository URL of this project? (leave empty for none)",
|
.with_validator(|repo: &str| {
|
||||||
)
|
if repo.is_empty() {
|
||||||
.with_validator(|repo: &str| {
|
return Ok(Validation::Valid);
|
||||||
if repo.is_empty() {
|
}
|
||||||
return Ok(Validation::Valid);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(match url::Url::parse(repo) {
|
Ok(match url::Url::parse(repo) {
|
||||||
Ok(_) => Validation::Valid,
|
Ok(_) => Validation::Valid,
|
||||||
Err(e) => Validation::Invalid(e.to_string().into()),
|
Err(e) => Validation::Invalid(e.to_string().into()),
|
||||||
|
})
|
||||||
})
|
})
|
||||||
})
|
.with_help_message("leave empty for none")
|
||||||
.prompt()
|
.prompt()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
if !repo.is_empty() {
|
if !repo.is_empty() {
|
||||||
manifest["repository"] = toml_edit::value(repo);
|
manifest["repository"] = toml_edit::value(repo);
|
||||||
}
|
}
|
||||||
|
|
||||||
let license =
|
let license = inquire::Text::new("what is the license of this project?")
|
||||||
inquire::Text::new("What is the license of this project? (leave empty for none)")
|
.with_initial_value("MIT")
|
||||||
.with_initial_value("MIT")
|
.with_help_message("an SPDX license identifier. leave empty for none")
|
||||||
.prompt()
|
.prompt()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
if !license.is_empty() {
|
if !license.is_empty() {
|
||||||
manifest["license"] = toml_edit::value(license);
|
manifest["license"] = toml_edit::value(license);
|
||||||
}
|
}
|
||||||
|
|
||||||
let target_env = inquire::Select::new(
|
let target_env = inquire::Select::new(
|
||||||
"What environment are you targeting for your package?",
|
"what environment are you targeting for your package?",
|
||||||
vec!["roblox", "roblox_server", "lune", "luau"],
|
TargetKind::VARIANTS.to_vec(),
|
||||||
)
|
)
|
||||||
.prompt()
|
.prompt()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
manifest["target"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
|
manifest["target"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
|
||||||
["environment"] = toml_edit::value(target_env);
|
["environment"] = toml_edit::value(target_env.to_string());
|
||||||
|
|
||||||
if target_env == "roblox"
|
let source = PesdePackageSource::new(read_config().await?.default_index);
|
||||||
|| target_env == "roblox_server"
|
|
||||||
|| inquire::Confirm::new(&format!(
|
manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
|
||||||
"Would you like to setup a default {} script?",
|
[DEFAULT_INDEX_NAME] = toml_edit::value(source.repo_url().to_bstring().to_string());
|
||||||
ScriptName::RobloxSyncConfigGenerator
|
|
||||||
))
|
if target_env.is_roblox()
|
||||||
.prompt()
|
|| inquire::prompt_confirmation(
|
||||||
|
"would you like to setup default Roblox compatibility scripts?",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
{
|
{
|
||||||
let folder = project
|
PackageSource::refresh(&source, &project)
|
||||||
.package_dir()
|
.await
|
||||||
.join(concat!(".", env!("CARGO_PKG_NAME")));
|
.context("failed to refresh package source")?;
|
||||||
std::fs::create_dir_all(&folder).context("failed to create scripts folder")?;
|
let config = source
|
||||||
|
.config(&project)
|
||||||
|
.await
|
||||||
|
.context("failed to get source config")?;
|
||||||
|
|
||||||
std::fs::write(
|
let scripts_package = if config.scripts_packages.is_empty() {
|
||||||
folder.join(format!("{}.luau", ScriptName::RobloxSyncConfigGenerator)),
|
PackageNameOrCustom::Custom
|
||||||
script_contents(Path::new(&format!(
|
} else {
|
||||||
"lune/rojo/{}.luau",
|
inquire::Select::new(
|
||||||
ScriptName::RobloxSyncConfigGenerator
|
"which scripts package do you want to use?",
|
||||||
))),
|
config
|
||||||
)
|
.scripts_packages
|
||||||
.context("failed to write sync config generator script file")?;
|
.into_iter()
|
||||||
|
.map(PackageNameOrCustom::PackageName)
|
||||||
|
.chain(std::iter::once(PackageNameOrCustom::Custom))
|
||||||
|
.collect(),
|
||||||
|
)
|
||||||
|
.prompt()
|
||||||
|
.unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
#[cfg(feature = "wally-compat")]
|
let scripts_package = match scripts_package {
|
||||||
std::fs::write(
|
PackageNameOrCustom::PackageName(p) => Some(p),
|
||||||
folder.join(format!("{}.luau", ScriptName::SourcemapGenerator)),
|
PackageNameOrCustom::Custom => {
|
||||||
script_contents(Path::new(&format!(
|
let name = inquire::Text::new("which scripts package to use?")
|
||||||
"lune/rojo/{}.luau",
|
.with_validator(|name: &str| {
|
||||||
ScriptName::SourcemapGenerator
|
if name.is_empty() {
|
||||||
))),
|
return Ok(Validation::Valid);
|
||||||
)
|
}
|
||||||
.context("failed to write sourcemap generator script file")?;
|
|
||||||
|
|
||||||
let scripts =
|
Ok(match PackageName::from_str(name) {
|
||||||
manifest["scripts"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
|
Ok(_) => Validation::Valid,
|
||||||
|
Err(e) => Validation::Invalid(e.to_string().into()),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.with_help_message("leave empty for none")
|
||||||
|
.prompt()
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
scripts[&ScriptName::RobloxSyncConfigGenerator.to_string()] =
|
if name.is_empty() {
|
||||||
toml_edit::value(format!(
|
None
|
||||||
concat!(".", env!("CARGO_PKG_NAME"), "/{}.luau"),
|
} else {
|
||||||
ScriptName::RobloxSyncConfigGenerator
|
Some(PackageName::from_str(&name).unwrap())
|
||||||
));
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
#[cfg(feature = "wally-compat")]
|
if let Some(scripts_pkg_name) = scripts_package {
|
||||||
{
|
let (v_id, pkg_ref) = source
|
||||||
scripts[&ScriptName::SourcemapGenerator.to_string()] = toml_edit::value(format!(
|
.resolve(
|
||||||
concat!(".", env!("CARGO_PKG_NAME"), "/{}.luau"),
|
&PesdeDependencySpecifier {
|
||||||
ScriptName::SourcemapGenerator
|
name: scripts_pkg_name,
|
||||||
));
|
version: VersionReq::STAR,
|
||||||
|
index: None,
|
||||||
|
target: None,
|
||||||
|
},
|
||||||
|
&project,
|
||||||
|
TargetKind::Lune,
|
||||||
|
&mut HashSet::new(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to resolve scripts package")?
|
||||||
|
.1
|
||||||
|
.pop_last()
|
||||||
|
.context("scripts package not found")?;
|
||||||
|
|
||||||
|
let Some(scripts) = pkg_ref.target.scripts().filter(|s| !s.is_empty()) else {
|
||||||
|
anyhow::bail!("scripts package has no scripts. this is an issue with the index")
|
||||||
|
};
|
||||||
|
|
||||||
|
let scripts_field = &mut manifest["scripts"]
|
||||||
|
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
|
||||||
|
|
||||||
|
for script_name in scripts.keys() {
|
||||||
|
scripts_field[script_name] = toml_edit::value(format!(
|
||||||
|
"{SCRIPTS_LINK_FOLDER}/scripts/{script_name}.luau"
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let dev_deps = &mut manifest["dev_dependencies"]
|
||||||
|
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
|
||||||
|
|
||||||
|
let field = &mut dev_deps["scripts"];
|
||||||
|
field["name"] = toml_edit::value(pkg_ref.name.to_string());
|
||||||
|
field["version"] = toml_edit::value(format!("^{}", v_id.version()));
|
||||||
|
field["target"] = toml_edit::value(v_id.target().to_string());
|
||||||
|
|
||||||
|
for (alias, (spec, ty)) in pkg_ref.dependencies {
|
||||||
|
if ty != DependencyType::Peer {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let DependencySpecifiers::Pesde(spec) = spec else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
|
||||||
|
let field = &mut dev_deps[alias];
|
||||||
|
field["name"] = toml_edit::value(spec.name.to_string());
|
||||||
|
field["version"] = toml_edit::value(spec.version.to_string());
|
||||||
|
field["target"] =
|
||||||
|
toml_edit::value(spec.target.unwrap_or_else(|| *v_id.target()).to_string());
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
"no scripts package configured, this can cause issues with Roblox compatibility".red()
|
||||||
|
);
|
||||||
|
if !inquire::prompt_confirmation("initialize regardless?").unwrap() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
|
project.write_manifest(manifest.to_string()).await?;
|
||||||
[DEFAULT_INDEX_NAME] =
|
|
||||||
toml_edit::value(read_config()?.default_index.to_bstring().to_string());
|
|
||||||
|
|
||||||
project.write_manifest(manifest.to_string())?;
|
println!(
|
||||||
|
"{}\n{}: run `install` to fully finish setup",
|
||||||
println!("{}", "initialized project".green());
|
"initialized project".green(),
|
||||||
|
"tip".cyan().bold()
|
||||||
|
);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,23 +1,23 @@
|
||||||
use crate::cli::{
|
use crate::cli::{
|
||||||
bin_dir, download_graph, files::make_executable, run_on_workspace_members, up_to_date_lockfile,
|
bin_dir, files::make_executable, progress_bar, run_on_workspace_members, up_to_date_lockfile,
|
||||||
};
|
};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::{ColoredString, Colorize};
|
use colored::{ColoredString, Colorize};
|
||||||
use indicatif::MultiProgress;
|
use fs_err::tokio as fs;
|
||||||
|
use futures::future::try_join_all;
|
||||||
use pesde::{
|
use pesde::{
|
||||||
lockfile::Lockfile,
|
download_and_link::filter_graph, lockfile::Lockfile, manifest::target::TargetKind, Project,
|
||||||
manifest::{target::TargetKind, DependencyType},
|
MANIFEST_FILE_NAME,
|
||||||
Project, MANIFEST_FILE_NAME,
|
|
||||||
};
|
};
|
||||||
use std::collections::{BTreeSet, HashSet};
|
use std::{
|
||||||
|
collections::{BTreeSet, HashMap, HashSet},
|
||||||
|
sync::Arc,
|
||||||
|
};
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
#[derive(Debug, Args, Copy, Clone)]
|
#[derive(Debug, Args, Copy, Clone)]
|
||||||
pub struct InstallCommand {
|
pub struct InstallCommand {
|
||||||
/// The amount of threads to use for downloading
|
|
||||||
#[arg(short, long, default_value_t = 6, value_parser = clap::value_parser!(u64).range(1..=128))]
|
|
||||||
threads: u64,
|
|
||||||
|
|
||||||
/// Whether to error on changes in the lockfile
|
/// Whether to error on changes in the lockfile
|
||||||
#[arg(long)]
|
#[arg(long)]
|
||||||
locked: bool,
|
locked: bool,
|
||||||
|
@ -44,12 +44,8 @@ fn bin_link_file(alias: &str) -> String {
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
|
|
||||||
#[cfg(not(unix))]
|
|
||||||
let prefix = String::new();
|
|
||||||
#[cfg(unix)]
|
|
||||||
let prefix = "#!/usr/bin/env -S lune run\n";
|
|
||||||
format!(
|
format!(
|
||||||
r#"{prefix}local process = require("@lune/process")
|
r#"local process = require("@lune/process")
|
||||||
local fs = require("@lune/fs")
|
local fs = require("@lune/fs")
|
||||||
local stdio = require("@lune/stdio")
|
local stdio = require("@lune/stdio")
|
||||||
|
|
||||||
|
@ -79,29 +75,29 @@ stdio.ewrite(stdio.color("red") .. "binary `{alias}` not found. are you in the r
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "patches")]
|
#[cfg(feature = "patches")]
|
||||||
const JOBS: u8 = 6;
|
|
||||||
#[cfg(not(feature = "patches"))]
|
|
||||||
const JOBS: u8 = 5;
|
const JOBS: u8 = 5;
|
||||||
|
#[cfg(not(feature = "patches"))]
|
||||||
|
const JOBS: u8 = 4;
|
||||||
|
|
||||||
fn job(n: u8) -> ColoredString {
|
fn job(n: u8) -> ColoredString {
|
||||||
format!("[{n}/{JOBS}]").dimmed().bold()
|
format!("[{n}/{JOBS}]").dimmed().bold()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, thiserror::Error)]
|
||||||
|
#[error(transparent)]
|
||||||
|
struct CallbackError(#[from] anyhow::Error);
|
||||||
|
|
||||||
impl InstallCommand {
|
impl InstallCommand {
|
||||||
pub fn run(
|
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||||
self,
|
|
||||||
project: Project,
|
|
||||||
multi: MultiProgress,
|
|
||||||
reqwest: reqwest::blocking::Client,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
let mut refreshed_sources = HashSet::new();
|
let mut refreshed_sources = HashSet::new();
|
||||||
|
|
||||||
let manifest = project
|
let manifest = project
|
||||||
.deser_manifest()
|
.deser_manifest()
|
||||||
|
.await
|
||||||
.context("failed to read manifest")?;
|
.context("failed to read manifest")?;
|
||||||
|
|
||||||
let lockfile = if self.locked {
|
let lockfile = if self.locked {
|
||||||
match up_to_date_lockfile(&project)? {
|
match up_to_date_lockfile(&project).await? {
|
||||||
None => {
|
None => {
|
||||||
anyhow::bail!(
|
anyhow::bail!(
|
||||||
"lockfile is out of sync, run `{} install` to update it",
|
"lockfile is out of sync, run `{} install` to update it",
|
||||||
|
@ -111,13 +107,13 @@ impl InstallCommand {
|
||||||
file => file,
|
file => file,
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
match project.deser_lockfile() {
|
match project.deser_lockfile().await {
|
||||||
Ok(lockfile) => {
|
Ok(lockfile) => {
|
||||||
if lockfile.overrides != manifest.overrides {
|
if lockfile.overrides != manifest.overrides {
|
||||||
log::debug!("overrides are different");
|
tracing::debug!("overrides are different");
|
||||||
None
|
None
|
||||||
} else if lockfile.target != manifest.target.kind() {
|
} else if lockfile.target != manifest.target.kind() {
|
||||||
log::debug!("target kind is different");
|
tracing::debug!("target kind is different");
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
Some(lockfile)
|
Some(lockfile)
|
||||||
|
@ -142,22 +138,32 @@ impl InstallCommand {
|
||||||
println!("{} ❌ removing current package folders", job(1));
|
println!("{} ❌ removing current package folders", job(1));
|
||||||
|
|
||||||
{
|
{
|
||||||
let mut deleted_folders = HashSet::new();
|
let mut deleted_folders = HashMap::new();
|
||||||
|
|
||||||
for target_kind in TargetKind::VARIANTS {
|
for target_kind in TargetKind::VARIANTS {
|
||||||
let folder = manifest.target.kind().packages_folder(target_kind);
|
let folder = manifest.target.kind().packages_folder(target_kind);
|
||||||
|
let package_dir = project.package_dir();
|
||||||
|
|
||||||
if deleted_folders.insert(folder.to_string()) {
|
deleted_folders
|
||||||
log::debug!("deleting the {folder} folder");
|
.entry(folder.to_string())
|
||||||
|
.or_insert_with(|| async move {
|
||||||
|
tracing::debug!("deleting the {folder} folder");
|
||||||
|
|
||||||
if let Some(e) = std::fs::remove_dir_all(project.package_dir().join(&folder))
|
if let Some(e) = fs::remove_dir_all(package_dir.join(&folder))
|
||||||
.err()
|
.await
|
||||||
.filter(|e| e.kind() != std::io::ErrorKind::NotFound)
|
.err()
|
||||||
{
|
.filter(|e| e.kind() != std::io::ErrorKind::NotFound)
|
||||||
return Err(e).context(format!("failed to remove the {folder} folder"));
|
{
|
||||||
};
|
return Err(e).context(format!("failed to remove the {folder} folder"));
|
||||||
}
|
};
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
try_join_all(deleted_folders.into_values())
|
||||||
|
.await
|
||||||
|
.context("failed to remove package folders")?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let old_graph = lockfile.map(|lockfile| {
|
let old_graph = lockfile.map(|lockfile| {
|
||||||
|
@ -179,87 +185,122 @@ impl InstallCommand {
|
||||||
println!("{} 📦 building dependency graph", job(2));
|
println!("{} 📦 building dependency graph", job(2));
|
||||||
|
|
||||||
let graph = project
|
let graph = project
|
||||||
.dependency_graph(old_graph.as_ref(), &mut refreshed_sources)
|
.dependency_graph(old_graph.as_ref(), &mut refreshed_sources, false)
|
||||||
|
.await
|
||||||
.context("failed to build dependency graph")?;
|
.context("failed to build dependency graph")?;
|
||||||
|
let graph = Arc::new(graph);
|
||||||
|
|
||||||
let downloaded_graph = download_graph(
|
let bin_folder = bin_dir().await?;
|
||||||
&project,
|
|
||||||
&mut refreshed_sources,
|
let downloaded_graph = {
|
||||||
&graph,
|
let (rx, downloaded_graph) = project
|
||||||
&multi,
|
.download_and_link(
|
||||||
&reqwest,
|
&graph,
|
||||||
self.threads as usize,
|
&Arc::new(Mutex::new(refreshed_sources)),
|
||||||
self.prod,
|
&reqwest,
|
||||||
true,
|
self.prod,
|
||||||
format!("{} 📥 downloading dependencies", job(3)),
|
true,
|
||||||
format!("{} 📥 downloaded dependencies", job(3)),
|
|graph| {
|
||||||
)?;
|
let graph = graph.clone();
|
||||||
|
|
||||||
|
async move {
|
||||||
|
try_join_all(
|
||||||
|
graph
|
||||||
|
.values()
|
||||||
|
.flat_map(|versions| versions.values())
|
||||||
|
.filter(|node| node.target.bin_path().is_some())
|
||||||
|
.filter_map(|node| node.node.direct.as_ref())
|
||||||
|
.map(|(alias, _, _)| alias)
|
||||||
|
.filter(|alias| {
|
||||||
|
if *alias == env!("CARGO_BIN_NAME") {
|
||||||
|
tracing::warn!(
|
||||||
|
"package {alias} has the same name as the CLI, skipping bin link"
|
||||||
|
);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
true
|
||||||
|
})
|
||||||
|
.map(|alias| {
|
||||||
|
let bin_folder = bin_folder.clone();
|
||||||
|
async move {
|
||||||
|
let bin_exec_file = bin_folder.join(alias).with_extension(std::env::consts::EXE_EXTENSION);
|
||||||
|
|
||||||
|
let impl_folder = bin_folder.join(".impl");
|
||||||
|
fs::create_dir_all(&impl_folder).await.context("failed to create bin link folder")?;
|
||||||
|
|
||||||
|
let bin_file = impl_folder.join(alias).with_extension("luau");
|
||||||
|
fs::write(&bin_file, bin_link_file(alias))
|
||||||
|
.await
|
||||||
|
.context("failed to write bin link file")?;
|
||||||
|
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
{
|
||||||
|
fs::copy(
|
||||||
|
std::env::current_exe()
|
||||||
|
.context("failed to get current executable path")?,
|
||||||
|
&bin_exec_file,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to copy bin link file")?;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
{
|
||||||
|
fs::write(
|
||||||
|
&bin_exec_file,
|
||||||
|
format!(r#"#!/bin/sh
|
||||||
|
exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to link bin link file")?;
|
||||||
|
}
|
||||||
|
|
||||||
|
make_executable(&bin_exec_file).await.context("failed to make bin link file executable")?;
|
||||||
|
|
||||||
|
Ok::<_, CallbackError>(())
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map(|_| ())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to download dependencies")?;
|
||||||
|
|
||||||
|
progress_bar(
|
||||||
|
graph.values().map(|versions| versions.len() as u64).sum(),
|
||||||
|
rx,
|
||||||
|
format!("{} 📥 ", job(3)),
|
||||||
|
"downloading dependencies".to_string(),
|
||||||
|
"downloaded dependencies".to_string(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let filtered_graph = if self.prod {
|
|
||||||
downloaded_graph
|
downloaded_graph
|
||||||
.clone()
|
.await
|
||||||
.into_iter()
|
.context("failed to download & link dependencies")?
|
||||||
.map(|(n, v)| {
|
|
||||||
(
|
|
||||||
n,
|
|
||||||
v.into_iter()
|
|
||||||
.filter(|(_, n)| n.node.ty != DependencyType::Dev)
|
|
||||||
.collect(),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
} else {
|
|
||||||
downloaded_graph.clone()
|
|
||||||
};
|
};
|
||||||
|
|
||||||
println!("{} 🗺️ linking dependencies", job(4));
|
|
||||||
|
|
||||||
project
|
|
||||||
.link_dependencies(&filtered_graph)
|
|
||||||
.context("failed to link dependencies")?;
|
|
||||||
|
|
||||||
let bin_folder = bin_dir()?;
|
|
||||||
|
|
||||||
for versions in filtered_graph.values() {
|
|
||||||
for node in versions.values() {
|
|
||||||
if node.target.bin_path().is_none() {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let Some((alias, _)) = &node.node.direct else {
|
|
||||||
continue;
|
|
||||||
};
|
|
||||||
|
|
||||||
if alias == env!("CARGO_BIN_NAME") {
|
|
||||||
log::warn!("package {alias} has the same name as the CLI, skipping bin link");
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let bin_file = bin_folder.join(alias);
|
|
||||||
std::fs::write(&bin_file, bin_link_file(alias))
|
|
||||||
.context("failed to write bin link file")?;
|
|
||||||
|
|
||||||
make_executable(&bin_file).context("failed to make bin link executable")?;
|
|
||||||
|
|
||||||
#[cfg(windows)]
|
|
||||||
{
|
|
||||||
let bin_file = bin_file.with_extension(std::env::consts::EXE_EXTENSION);
|
|
||||||
std::fs::copy(
|
|
||||||
std::env::current_exe().context("failed to get current executable path")?,
|
|
||||||
&bin_file,
|
|
||||||
)
|
|
||||||
.context("failed to copy bin link file")?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "patches")]
|
#[cfg(feature = "patches")]
|
||||||
{
|
{
|
||||||
println!("{} 🩹 applying patches", job(5));
|
let rx = project
|
||||||
|
.apply_patches(&filter_graph(&downloaded_graph, self.prod))
|
||||||
project
|
.await
|
||||||
.apply_patches(&filtered_graph)
|
|
||||||
.context("failed to apply patches")?;
|
.context("failed to apply patches")?;
|
||||||
|
|
||||||
|
progress_bar(
|
||||||
|
manifest.patches.values().map(|v| v.len() as u64).sum(),
|
||||||
|
rx,
|
||||||
|
format!("{} 🩹 ", job(JOBS - 1)),
|
||||||
|
"applying patches".to_string(),
|
||||||
|
"applied patches".to_string(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
}
|
}
|
||||||
|
|
||||||
println!("{} 🧹 finishing up", job(JOBS));
|
println!("{} 🧹 finishing up", job(JOBS));
|
||||||
|
@ -274,9 +315,12 @@ impl InstallCommand {
|
||||||
graph: downloaded_graph,
|
graph: downloaded_graph,
|
||||||
|
|
||||||
workspace: run_on_workspace_members(&project, |project| {
|
workspace: run_on_workspace_members(&project, |project| {
|
||||||
self.run(project, multi.clone(), reqwest.clone())
|
let reqwest = reqwest.clone();
|
||||||
})?,
|
async move { Box::pin(self.run(project, reqwest)).await }
|
||||||
|
})
|
||||||
|
.await?,
|
||||||
})
|
})
|
||||||
|
.await
|
||||||
.context("failed to write lockfile")?;
|
.context("failed to write lockfile")?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
use indicatif::MultiProgress;
|
|
||||||
use pesde::Project;
|
use pesde::Project;
|
||||||
|
|
||||||
mod add;
|
mod add;
|
||||||
|
@ -23,8 +22,7 @@ mod update;
|
||||||
#[derive(Debug, clap::Subcommand)]
|
#[derive(Debug, clap::Subcommand)]
|
||||||
pub enum Subcommand {
|
pub enum Subcommand {
|
||||||
/// Authentication-related commands
|
/// Authentication-related commands
|
||||||
#[command(subcommand)]
|
Auth(auth::AuthSubcommand),
|
||||||
Auth(auth::AuthCommands),
|
|
||||||
|
|
||||||
/// Configuration-related commands
|
/// Configuration-related commands
|
||||||
#[command(subcommand)]
|
#[command(subcommand)]
|
||||||
|
@ -73,31 +71,26 @@ pub enum Subcommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Subcommand {
|
impl Subcommand {
|
||||||
pub fn run(
|
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||||
self,
|
|
||||||
project: Project,
|
|
||||||
multi: MultiProgress,
|
|
||||||
reqwest: reqwest::blocking::Client,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
match self {
|
match self {
|
||||||
Subcommand::Auth(auth) => auth.run(project, reqwest),
|
Subcommand::Auth(auth) => auth.run(project, reqwest).await,
|
||||||
Subcommand::Config(config) => config.run(),
|
Subcommand::Config(config) => config.run().await,
|
||||||
Subcommand::Init(init) => init.run(project),
|
Subcommand::Init(init) => init.run(project).await,
|
||||||
Subcommand::Run(run) => run.run(project),
|
Subcommand::Run(run) => run.run(project).await,
|
||||||
Subcommand::Install(install) => install.run(project, multi, reqwest),
|
Subcommand::Install(install) => install.run(project, reqwest).await,
|
||||||
Subcommand::Publish(publish) => publish.run(project, reqwest),
|
Subcommand::Publish(publish) => publish.run(project, reqwest).await,
|
||||||
#[cfg(feature = "version-management")]
|
#[cfg(feature = "version-management")]
|
||||||
Subcommand::SelfInstall(self_install) => self_install.run(),
|
Subcommand::SelfInstall(self_install) => self_install.run().await,
|
||||||
#[cfg(feature = "patches")]
|
#[cfg(feature = "patches")]
|
||||||
Subcommand::Patch(patch) => patch.run(project, reqwest),
|
Subcommand::Patch(patch) => patch.run(project, reqwest).await,
|
||||||
#[cfg(feature = "patches")]
|
#[cfg(feature = "patches")]
|
||||||
Subcommand::PatchCommit(patch_commit) => patch_commit.run(project),
|
Subcommand::PatchCommit(patch_commit) => patch_commit.run(project).await,
|
||||||
#[cfg(feature = "version-management")]
|
#[cfg(feature = "version-management")]
|
||||||
Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest),
|
Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await,
|
||||||
Subcommand::Add(add) => add.run(project),
|
Subcommand::Add(add) => add.run(project).await,
|
||||||
Subcommand::Update(update) => update.run(project, multi, reqwest),
|
Subcommand::Update(update) => update.run(project, reqwest).await,
|
||||||
Subcommand::Outdated(outdated) => outdated.run(project),
|
Subcommand::Outdated(outdated) => outdated.run(project).await,
|
||||||
Subcommand::Execute(execute) => execute.run(project, reqwest),
|
Subcommand::Execute(execute) => execute.run(project, reqwest).await,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,16 +1,19 @@
|
||||||
use std::collections::HashSet;
|
use crate::cli::up_to_date_lockfile;
|
||||||
|
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use semver::VersionReq;
|
use futures::future::try_join_all;
|
||||||
|
|
||||||
use pesde::{
|
use pesde::{
|
||||||
|
refresh_sources,
|
||||||
source::{
|
source::{
|
||||||
|
refs::PackageRefs,
|
||||||
specifiers::DependencySpecifiers,
|
specifiers::DependencySpecifiers,
|
||||||
traits::{PackageRef, PackageSource},
|
traits::{PackageRef, PackageSource},
|
||||||
},
|
},
|
||||||
Project,
|
Project,
|
||||||
};
|
};
|
||||||
|
use semver::VersionReq;
|
||||||
|
use std::{collections::HashSet, sync::Arc};
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct OutdatedCommand {
|
pub struct OutdatedCommand {
|
||||||
|
@ -20,60 +23,112 @@ pub struct OutdatedCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl OutdatedCommand {
|
impl OutdatedCommand {
|
||||||
pub fn run(self, project: Project) -> anyhow::Result<()> {
|
pub async fn run(self, project: Project) -> anyhow::Result<()> {
|
||||||
let graph = project.deser_lockfile()?.graph;
|
let graph = match up_to_date_lockfile(&project).await? {
|
||||||
|
Some(file) => file.graph,
|
||||||
|
None => {
|
||||||
|
anyhow::bail!(
|
||||||
|
"lockfile is out of sync, run `{} install` to update it",
|
||||||
|
env!("CARGO_BIN_NAME")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
let manifest = project
|
let manifest = project
|
||||||
.deser_manifest()
|
.deser_manifest()
|
||||||
|
.await
|
||||||
.context("failed to read manifest")?;
|
.context("failed to read manifest")?;
|
||||||
|
let manifest_target_kind = manifest.target.kind();
|
||||||
|
|
||||||
let mut refreshed_sources = HashSet::new();
|
let mut refreshed_sources = HashSet::new();
|
||||||
|
|
||||||
for (name, versions) in graph {
|
refresh_sources(
|
||||||
for (current_version_id, node) in versions {
|
&project,
|
||||||
let Some((alias, mut specifier)) = node.node.direct else {
|
graph
|
||||||
continue;
|
.iter()
|
||||||
};
|
.flat_map(|(_, versions)| versions.iter())
|
||||||
|
.map(|(_, node)| node.node.pkg_ref.source()),
|
||||||
|
&mut refreshed_sources,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
if matches!(
|
let refreshed_sources = Arc::new(Mutex::new(refreshed_sources));
|
||||||
specifier,
|
|
||||||
DependencySpecifiers::Git(_) | DependencySpecifiers::Workspace(_)
|
|
||||||
) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let source = node.node.pkg_ref.source();
|
if try_join_all(
|
||||||
|
graph
|
||||||
|
.into_iter()
|
||||||
|
.flat_map(|(_, versions)| versions.into_iter())
|
||||||
|
.map(|(current_version_id, node)| {
|
||||||
|
let project = project.clone();
|
||||||
|
let refreshed_sources = refreshed_sources.clone();
|
||||||
|
async move {
|
||||||
|
let Some((alias, mut specifier, _)) = node.node.direct else {
|
||||||
|
return Ok::<bool, anyhow::Error>(true);
|
||||||
|
};
|
||||||
|
|
||||||
if refreshed_sources.insert(source.clone()) {
|
if matches!(
|
||||||
source.refresh(&project)?;
|
specifier,
|
||||||
}
|
DependencySpecifiers::Git(_) | DependencySpecifiers::Workspace(_)
|
||||||
|
) {
|
||||||
if !self.strict {
|
return Ok(true);
|
||||||
match specifier {
|
|
||||||
DependencySpecifiers::Pesde(ref mut spec) => {
|
|
||||||
spec.version = VersionReq::STAR;
|
|
||||||
}
|
}
|
||||||
#[cfg(feature = "wally-compat")]
|
|
||||||
DependencySpecifiers::Wally(ref mut spec) => {
|
let source = node.node.pkg_ref.source();
|
||||||
spec.version = VersionReq::STAR;
|
|
||||||
|
if !self.strict {
|
||||||
|
match specifier {
|
||||||
|
DependencySpecifiers::Pesde(ref mut spec) => {
|
||||||
|
spec.version = VersionReq::STAR;
|
||||||
|
}
|
||||||
|
#[cfg(feature = "wally-compat")]
|
||||||
|
DependencySpecifiers::Wally(ref mut spec) => {
|
||||||
|
spec.version = VersionReq::STAR;
|
||||||
|
}
|
||||||
|
DependencySpecifiers::Git(_) => {}
|
||||||
|
DependencySpecifiers::Workspace(_) => {}
|
||||||
|
};
|
||||||
}
|
}
|
||||||
DependencySpecifiers::Git(_) => {}
|
|
||||||
DependencySpecifiers::Workspace(_) => {}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
let version_id = source
|
let version_id = source
|
||||||
.resolve(&specifier, &project, manifest.target.kind())
|
.resolve(
|
||||||
.context("failed to resolve package versions")?
|
&specifier,
|
||||||
.1
|
&project,
|
||||||
.pop_last()
|
manifest_target_kind,
|
||||||
.map(|(v_id, _)| v_id)
|
&mut *refreshed_sources.lock().await,
|
||||||
.context(format!("no versions of {specifier} found"))?;
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to resolve package versions")?
|
||||||
|
.1
|
||||||
|
.pop_last()
|
||||||
|
.map(|(v_id, _)| v_id)
|
||||||
|
.context(format!("no versions of {specifier} found"))?;
|
||||||
|
|
||||||
if version_id != current_version_id {
|
if version_id != current_version_id {
|
||||||
println!("{name} ({alias}) {current_version_id} -> {version_id}");
|
println!(
|
||||||
}
|
"{} {} ({alias}) {} -> {}",
|
||||||
}
|
match node.node.pkg_ref {
|
||||||
|
PackageRefs::Pesde(pkg_ref) => pkg_ref.name.to_string(),
|
||||||
|
#[cfg(feature = "wally-compat")]
|
||||||
|
PackageRefs::Wally(pkg_ref) => pkg_ref.name.to_string(),
|
||||||
|
_ => unreachable!(),
|
||||||
|
},
|
||||||
|
current_version_id.target(),
|
||||||
|
current_version_id.version(),
|
||||||
|
version_id.version()
|
||||||
|
);
|
||||||
|
|
||||||
|
return Ok(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(true)
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.into_iter()
|
||||||
|
.all(|b| b)
|
||||||
|
{
|
||||||
|
println!("all packages are up to date");
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
|
@ -2,6 +2,7 @@ use crate::cli::{up_to_date_lockfile, VersionedPackageName};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
|
use fs_err::tokio as fs;
|
||||||
use pesde::{
|
use pesde::{
|
||||||
patches::setup_patches_repo,
|
patches::setup_patches_repo,
|
||||||
source::{
|
source::{
|
||||||
|
@ -19,8 +20,8 @@ pub struct PatchCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PatchCommand {
|
impl PatchCommand {
|
||||||
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
|
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||||
let graph = if let Some(lockfile) = up_to_date_lockfile(&project)? {
|
let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
|
||||||
lockfile.graph
|
lockfile.graph
|
||||||
} else {
|
} else {
|
||||||
anyhow::bail!("outdated lockfile, please run the install command first")
|
anyhow::bail!("outdated lockfile, please run the install command first")
|
||||||
|
@ -45,12 +46,14 @@ impl PatchCommand {
|
||||||
.join(name.escaped())
|
.join(name.escaped())
|
||||||
.join(version_id.escaped())
|
.join(version_id.escaped())
|
||||||
.join(chrono::Utc::now().timestamp().to_string());
|
.join(chrono::Utc::now().timestamp().to_string());
|
||||||
std::fs::create_dir_all(&directory)?;
|
fs::create_dir_all(&directory).await?;
|
||||||
|
|
||||||
source
|
source
|
||||||
.download(&node.node.pkg_ref, &project, &reqwest)?
|
.download(&node.node.pkg_ref, &project, &reqwest)
|
||||||
|
.await?
|
||||||
.0
|
.0
|
||||||
.write_to(&directory, project.cas_dir(), false)
|
.write_to(&directory, project.cas_dir(), false)
|
||||||
|
.await
|
||||||
.context("failed to write package contents")?;
|
.context("failed to write package contents")?;
|
||||||
|
|
||||||
setup_patches_repo(&directory)?;
|
setup_patches_repo(&directory)?;
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
use crate::cli::up_to_date_lockfile;
|
use crate::cli::up_to_date_lockfile;
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
|
use fs_err::tokio as fs;
|
||||||
use pesde::{names::PackageNames, patches::create_patch, source::version_id::VersionId, Project};
|
use pesde::{names::PackageNames, patches::create_patch, source::version_id::VersionId, Project};
|
||||||
use std::{path::PathBuf, str::FromStr};
|
use std::{path::PathBuf, str::FromStr};
|
||||||
|
|
||||||
|
@ -12,8 +13,8 @@ pub struct PatchCommitCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PatchCommitCommand {
|
impl PatchCommitCommand {
|
||||||
pub fn run(self, project: Project) -> anyhow::Result<()> {
|
pub async fn run(self, project: Project) -> anyhow::Result<()> {
|
||||||
let graph = if let Some(lockfile) = up_to_date_lockfile(&project)? {
|
let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
|
||||||
lockfile.graph
|
lockfile.graph
|
||||||
} else {
|
} else {
|
||||||
anyhow::bail!("outdated lockfile, please run the install command first")
|
anyhow::bail!("outdated lockfile, please run the install command first")
|
||||||
|
@ -48,15 +49,22 @@ impl PatchCommitCommand {
|
||||||
.context("package not found in graph")?;
|
.context("package not found in graph")?;
|
||||||
|
|
||||||
let mut manifest = toml_edit::DocumentMut::from_str(
|
let mut manifest = toml_edit::DocumentMut::from_str(
|
||||||
&project.read_manifest().context("failed to read manifest")?,
|
&project
|
||||||
|
.read_manifest()
|
||||||
|
.await
|
||||||
|
.context("failed to read manifest")?,
|
||||||
)
|
)
|
||||||
.context("failed to parse manifest")?;
|
.context("failed to parse manifest")?;
|
||||||
|
|
||||||
let patch = create_patch(&self.directory).context("failed to create patch")?;
|
let patch = create_patch(&self.directory).context("failed to create patch")?;
|
||||||
std::fs::remove_dir_all(self.directory).context("failed to remove patch directory")?;
|
fs::remove_dir_all(self.directory)
|
||||||
|
.await
|
||||||
|
.context("failed to remove patch directory")?;
|
||||||
|
|
||||||
let patches_dir = project.package_dir().join("patches");
|
let patches_dir = project.package_dir().join("patches");
|
||||||
std::fs::create_dir_all(&patches_dir).context("failed to create patches directory")?;
|
fs::create_dir_all(&patches_dir)
|
||||||
|
.await
|
||||||
|
.context("failed to create patches directory")?;
|
||||||
|
|
||||||
let patch_file_name = format!("{}-{}.patch", name.escaped(), version_id.escaped());
|
let patch_file_name = format!("{}-{}.patch", name.escaped(), version_id.escaped());
|
||||||
|
|
||||||
|
@ -65,7 +73,9 @@ impl PatchCommitCommand {
|
||||||
anyhow::bail!("patch file already exists: {}", patch_file.display());
|
anyhow::bail!("patch file already exists: {}", patch_file.display());
|
||||||
}
|
}
|
||||||
|
|
||||||
std::fs::write(&patch_file, patch).context("failed to write patch file")?;
|
fs::write(&patch_file, patch)
|
||||||
|
.await
|
||||||
|
.context("failed to write patch file")?;
|
||||||
|
|
||||||
manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
|
manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
|
||||||
[&name.to_string()][&version_id.to_string()] =
|
[&name.to_string()][&version_id.to_string()] =
|
||||||
|
@ -73,6 +83,7 @@ impl PatchCommitCommand {
|
||||||
|
|
||||||
project
|
project
|
||||||
.write_manifest(manifest.to_string())
|
.write_manifest(manifest.to_string())
|
||||||
|
.await
|
||||||
.context("failed to write manifest")?;
|
.context("failed to write manifest")?;
|
||||||
|
|
||||||
println!(concat!(
|
println!(concat!(
|
||||||
|
|
|
@ -1,29 +1,34 @@
|
||||||
|
use crate::cli::{display_err, run_on_workspace_members, up_to_date_lockfile};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
|
use async_compression::Level;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
use reqwest::StatusCode;
|
use fs_err::tokio as fs;
|
||||||
use semver::VersionReq;
|
#[allow(deprecated)]
|
||||||
use std::{
|
|
||||||
io::{Seek, Write},
|
|
||||||
path::Component,
|
|
||||||
};
|
|
||||||
use tempfile::tempfile;
|
|
||||||
|
|
||||||
use crate::cli::{run_on_workspace_members, up_to_date_lockfile};
|
|
||||||
use pesde::{
|
use pesde::{
|
||||||
manifest::{target::Target, DependencyType},
|
manifest::{target::Target, DependencyType},
|
||||||
|
matching_globs_old_behaviour,
|
||||||
scripts::ScriptName,
|
scripts::ScriptName,
|
||||||
source::{
|
source::{
|
||||||
|
git_index::GitBasedSource,
|
||||||
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
||||||
specifiers::DependencySpecifiers,
|
specifiers::DependencySpecifiers,
|
||||||
traits::PackageSource,
|
traits::PackageSource,
|
||||||
workspace::{specifier::VersionType, WorkspacePackageSource},
|
workspace::{
|
||||||
|
specifier::{VersionType, VersionTypeOrReq},
|
||||||
|
WorkspacePackageSource,
|
||||||
|
},
|
||||||
IGNORED_DIRS, IGNORED_FILES,
|
IGNORED_DIRS, IGNORED_FILES,
|
||||||
},
|
},
|
||||||
Project, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
|
Project, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
|
||||||
};
|
};
|
||||||
|
use reqwest::{header::AUTHORIZATION, StatusCode};
|
||||||
|
use semver::VersionReq;
|
||||||
|
use std::{collections::HashSet, path::PathBuf};
|
||||||
|
use tempfile::Builder;
|
||||||
|
use tokio::io::{AsyncSeekExt, AsyncWriteExt};
|
||||||
|
|
||||||
#[derive(Debug, Args, Copy, Clone)]
|
#[derive(Debug, Args, Clone)]
|
||||||
pub struct PublishCommand {
|
pub struct PublishCommand {
|
||||||
/// Whether to output a tarball instead of publishing
|
/// Whether to output a tarball instead of publishing
|
||||||
#[arg(short, long)]
|
#[arg(short, long)]
|
||||||
|
@ -32,12 +37,22 @@ pub struct PublishCommand {
|
||||||
/// Agree to all prompts
|
/// Agree to all prompts
|
||||||
#[arg(short, long)]
|
#[arg(short, long)]
|
||||||
yes: bool,
|
yes: bool,
|
||||||
|
|
||||||
|
/// The index to publish to
|
||||||
|
#[arg(short, long, default_value_t = DEFAULT_INDEX_NAME.to_string())]
|
||||||
|
index: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PublishCommand {
|
impl PublishCommand {
|
||||||
fn run_impl(self, project: &Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
|
async fn run_impl(
|
||||||
|
self,
|
||||||
|
project: &Project,
|
||||||
|
reqwest: reqwest::Client,
|
||||||
|
is_root: bool,
|
||||||
|
) -> anyhow::Result<()> {
|
||||||
let mut manifest = project
|
let mut manifest = project
|
||||||
.deser_manifest()
|
.deser_manifest()
|
||||||
|
.await
|
||||||
.context("failed to read manifest")?;
|
.context("failed to read manifest")?;
|
||||||
|
|
||||||
println!(
|
println!(
|
||||||
|
@ -48,12 +63,17 @@ impl PublishCommand {
|
||||||
);
|
);
|
||||||
|
|
||||||
if manifest.private {
|
if manifest.private {
|
||||||
println!("{}", "package is private, cannot publish".red().bold());
|
if !is_root {
|
||||||
|
println!("{}", "package is private, cannot publish".red().bold());
|
||||||
|
}
|
||||||
|
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
if manifest.target.lib_path().is_none() && manifest.target.bin_path().is_none() {
|
if manifest.target.lib_path().is_none()
|
||||||
|
&& manifest.target.bin_path().is_none()
|
||||||
|
&& manifest.target.scripts().is_none_or(|s| s.is_empty())
|
||||||
|
{
|
||||||
anyhow::bail!("no exports found in target");
|
anyhow::bail!("no exports found in target");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -61,11 +81,11 @@ impl PublishCommand {
|
||||||
manifest.target,
|
manifest.target,
|
||||||
Target::Roblox { .. } | Target::RobloxServer { .. }
|
Target::Roblox { .. } | Target::RobloxServer { .. }
|
||||||
) {
|
) {
|
||||||
if !manifest.target.build_files().is_some_and(|f| !f.is_empty()) {
|
if manifest.target.build_files().is_none_or(|f| f.is_empty()) {
|
||||||
anyhow::bail!("no build files found in target");
|
anyhow::bail!("no build files found in target");
|
||||||
}
|
}
|
||||||
|
|
||||||
match up_to_date_lockfile(project)? {
|
match up_to_date_lockfile(project).await? {
|
||||||
Some(lockfile) => {
|
Some(lockfile) => {
|
||||||
if lockfile
|
if lockfile
|
||||||
.graph
|
.graph
|
||||||
|
@ -74,7 +94,7 @@ impl PublishCommand {
|
||||||
.filter_map(|(_, node)| node.node.direct.as_ref().map(|_| node))
|
.filter_map(|(_, node)| node.node.direct.as_ref().map(|_| node))
|
||||||
.any(|node| {
|
.any(|node| {
|
||||||
node.target.build_files().is_none()
|
node.target.build_files().is_none()
|
||||||
&& !matches!(node.node.ty, DependencyType::Dev)
|
&& !matches!(node.node.resolved_ty, DependencyType::Dev)
|
||||||
})
|
})
|
||||||
{
|
{
|
||||||
anyhow::bail!("roblox packages may not depend on non-roblox packages");
|
anyhow::bail!("roblox packages may not depend on non-roblox packages");
|
||||||
|
@ -86,17 +106,21 @@ impl PublishCommand {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut archive = tar::Builder::new(flate2::write::GzEncoder::new(
|
let canonical_package_dir = project
|
||||||
vec![],
|
.package_dir()
|
||||||
flate2::Compression::best(),
|
.canonicalize()
|
||||||
));
|
.context("failed to canonicalize package directory")?;
|
||||||
|
|
||||||
|
let mut archive = tokio_tar::Builder::new(
|
||||||
|
async_compression::tokio::write::GzipEncoder::with_quality(vec![], Level::Best),
|
||||||
|
);
|
||||||
|
|
||||||
let mut display_includes: Vec<String> = vec![MANIFEST_FILE_NAME.to_string()];
|
|
||||||
let mut display_build_files: Vec<String> = vec![];
|
let mut display_build_files: Vec<String> = vec![];
|
||||||
|
|
||||||
let (lib_path, bin_path, target_kind) = (
|
let (lib_path, bin_path, scripts, target_kind) = (
|
||||||
manifest.target.lib_path().cloned(),
|
manifest.target.lib_path().cloned(),
|
||||||
manifest.target.bin_path().cloned(),
|
manifest.target.bin_path().cloned(),
|
||||||
|
manifest.target.scripts().cloned(),
|
||||||
manifest.target.kind(),
|
manifest.target.kind(),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -106,74 +130,94 @@ impl PublishCommand {
|
||||||
_ => None,
|
_ => None,
|
||||||
};
|
};
|
||||||
|
|
||||||
if !manifest.includes.insert(MANIFEST_FILE_NAME.to_string()) {
|
#[allow(deprecated)]
|
||||||
|
let mut paths = matching_globs_old_behaviour(
|
||||||
|
project.package_dir(),
|
||||||
|
manifest.includes.iter().map(|s| s.as_str()),
|
||||||
|
true,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to get included files")?;
|
||||||
|
|
||||||
|
if paths.insert(PathBuf::from(MANIFEST_FILE_NAME)) {
|
||||||
println!(
|
println!(
|
||||||
"{}: {MANIFEST_FILE_NAME} was not in includes, adding it",
|
"{}: {MANIFEST_FILE_NAME} was not included, adding it",
|
||||||
"warn".yellow().bold()
|
"warn".yellow().bold()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if manifest.includes.remove(".git") {
|
if paths.iter().any(|p| p.starts_with(".git")) {
|
||||||
println!(
|
anyhow::bail!("git directory was included, please remove it");
|
||||||
"{}: .git was in includes, removing it",
|
|
||||||
"warn".yellow().bold()
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if !manifest.includes.iter().any(|f| {
|
if !paths.iter().any(|f| {
|
||||||
matches!(
|
matches!(
|
||||||
f.to_lowercase().as_str(),
|
f.to_str().unwrap().to_lowercase().as_str(),
|
||||||
"readme" | "readme.md" | "readme.txt"
|
"readme" | "readme.md" | "readme.txt"
|
||||||
)
|
)
|
||||||
}) {
|
}) {
|
||||||
println!(
|
println!(
|
||||||
"{}: no README file in includes, consider adding one",
|
"{}: no README file included, consider adding one",
|
||||||
"warn".yellow().bold()
|
"warn".yellow().bold()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if !manifest.includes.iter().any(|f| f == "docs") {
|
if !paths.iter().any(|p| p.starts_with("docs")) {
|
||||||
println!(
|
println!(
|
||||||
"{}: no docs directory in includes, consider adding one",
|
"{}: docs directory not included, consider adding one",
|
||||||
"warn".yellow().bold()
|
"warn".yellow().bold()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if manifest.includes.remove("default.project.json") {
|
for path in &paths {
|
||||||
println!(
|
if path
|
||||||
"{}: default.project.json was in includes, this should be generated by the {} script upon dependants installation",
|
.file_name()
|
||||||
"warn".yellow().bold(),
|
.is_some_and(|n| n == "default.project.json")
|
||||||
ScriptName::RobloxSyncConfigGenerator
|
{
|
||||||
);
|
anyhow::bail!(
|
||||||
|
"default.project.json was included at `{}`, this should be generated by the {} script upon dependants installation",
|
||||||
|
path.display(),
|
||||||
|
ScriptName::RobloxSyncConfigGenerator
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for ignored_path in IGNORED_FILES.iter().chain(IGNORED_DIRS.iter()) {
|
for ignored_path in IGNORED_FILES.iter().chain(IGNORED_DIRS.iter()) {
|
||||||
if manifest.includes.remove(*ignored_path) {
|
if paths.iter().any(|p| {
|
||||||
println!(
|
p.components()
|
||||||
r#"{}: {ignored_path} was in includes, removing it.
|
.any(|ct| ct == std::path::Component::Normal(ignored_path.as_ref()))
|
||||||
{}: if this was a toolchain manager's manifest file, do not include it due to it possibly messing with user scripts
|
}) {
|
||||||
{}: otherwise, the file was deemed unnecessary, if you don't understand why, please contact the maintainers"#,
|
anyhow::bail!(
|
||||||
"warn".yellow().bold(),
|
r#"forbidden file {ignored_path} was included.
|
||||||
"info".blue().bold(),
|
info: if this was a toolchain manager's manifest file, do not include it due to it possibly messing with user scripts
|
||||||
"info".blue().bold()
|
info: otherwise, the file was deemed unnecessary, if you don't understand why, please contact the maintainers"#,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (name, path) in [("lib path", lib_path), ("bin path", bin_path)] {
|
for (name, path) in [("lib path", lib_path), ("bin path", bin_path)] {
|
||||||
let Some(export_path) = path else { continue };
|
let Some(relative_export_path) = path else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
|
||||||
let export_path = export_path.to_path(project.package_dir());
|
let export_path = relative_export_path.to_path(&canonical_package_dir);
|
||||||
if !export_path.exists() {
|
|
||||||
anyhow::bail!("{name} points to non-existent file");
|
|
||||||
}
|
|
||||||
|
|
||||||
if !export_path.is_file() {
|
let contents = match fs::read_to_string(&export_path).await {
|
||||||
anyhow::bail!("{name} must point to a file");
|
Ok(contents) => contents,
|
||||||
}
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||||
|
anyhow::bail!("{name} does not exist");
|
||||||
|
}
|
||||||
|
Err(e) if e.kind() == std::io::ErrorKind::IsADirectory => {
|
||||||
|
anyhow::bail!("{name} must point to a file");
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
return Err(e).context(format!("failed to read {name}"));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
let contents =
|
let export_path = export_path
|
||||||
std::fs::read_to_string(&export_path).context(format!("failed to read {name}"))?;
|
.canonicalize()
|
||||||
|
.context(format!("failed to canonicalize {name}"))?;
|
||||||
|
|
||||||
if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
|
if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
|
||||||
errs.into_iter()
|
errs.into_iter()
|
||||||
|
@ -184,66 +228,39 @@ impl PublishCommand {
|
||||||
anyhow::bail!("{name} is not a valid Luau file: {err}");
|
anyhow::bail!("{name} is not a valid Luau file: {err}");
|
||||||
}
|
}
|
||||||
|
|
||||||
let first_part = export_path
|
let first_part = relative_export_path
|
||||||
.strip_prefix(project.package_dir())
|
|
||||||
.context(format!("{name} not within project directory"))?
|
|
||||||
.components()
|
.components()
|
||||||
.next()
|
.next()
|
||||||
.context(format!("{name} must contain at least one part"))?;
|
.context(format!("{name} must contain at least one part"))?;
|
||||||
|
|
||||||
let first_part = match first_part {
|
let first_part = match first_part {
|
||||||
Component::Normal(part) => part,
|
relative_path::Component::Normal(part) => part,
|
||||||
_ => anyhow::bail!("{name} must be within project directory"),
|
_ => anyhow::bail!("{name} must be within project directory"),
|
||||||
};
|
};
|
||||||
|
|
||||||
let first_part_str = first_part.to_string_lossy();
|
if paths.insert(
|
||||||
|
export_path
|
||||||
if manifest.includes.insert(first_part_str.to_string()) {
|
.strip_prefix(&canonical_package_dir)
|
||||||
|
.unwrap()
|
||||||
|
.to_path_buf(),
|
||||||
|
) {
|
||||||
println!(
|
println!(
|
||||||
"{}: {name} was not in includes, adding {first_part_str}",
|
"{}: {name} was not included, adding {relative_export_path}",
|
||||||
"warn".yellow().bold()
|
"warn".yellow().bold()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if roblox_target.as_mut().map_or(false, |build_files| {
|
if roblox_target
|
||||||
build_files.insert(first_part_str.to_string())
|
.as_mut()
|
||||||
}) {
|
.is_some_and(|build_files| build_files.insert(first_part.to_string()))
|
||||||
|
{
|
||||||
println!(
|
println!(
|
||||||
"{}: {name} was not in build files, adding {first_part_str}",
|
"{}: {name} was not in build files, adding {first_part}",
|
||||||
"warn".yellow().bold()
|
"warn".yellow().bold()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for included_name in &manifest.includes {
|
|
||||||
let included_path = project.package_dir().join(included_name);
|
|
||||||
|
|
||||||
if !included_path.exists() {
|
|
||||||
anyhow::bail!("included file {included_name} does not exist");
|
|
||||||
}
|
|
||||||
|
|
||||||
// it's already included, and guaranteed to be a file
|
|
||||||
if included_name.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if included_path.is_file() {
|
|
||||||
display_includes.push(included_name.clone());
|
|
||||||
|
|
||||||
archive.append_file(
|
|
||||||
included_name,
|
|
||||||
&mut std::fs::File::open(&included_path)
|
|
||||||
.context(format!("failed to read {included_name}"))?,
|
|
||||||
)?;
|
|
||||||
} else {
|
|
||||||
display_includes.push(format!("{included_name}/*"));
|
|
||||||
|
|
||||||
archive
|
|
||||||
.append_dir_all(included_name, &included_path)
|
|
||||||
.context(format!("failed to include directory {included_name}"))?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(build_files) = &roblox_target {
|
if let Some(build_files) = &roblox_target {
|
||||||
for build_file in build_files.iter() {
|
for build_file in build_files.iter() {
|
||||||
if build_file.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
|
if build_file.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
|
||||||
|
@ -261,8 +278,8 @@ impl PublishCommand {
|
||||||
anyhow::bail!("build file {build_file} does not exist");
|
anyhow::bail!("build file {build_file} does not exist");
|
||||||
}
|
}
|
||||||
|
|
||||||
if !manifest.includes.contains(build_file) {
|
if !paths.iter().any(|p| p.starts_with(build_file)) {
|
||||||
anyhow::bail!("build file {build_file} is not in includes, please add it");
|
anyhow::bail!("build file {build_file} is not included, please add it");
|
||||||
}
|
}
|
||||||
|
|
||||||
if build_file_path.is_file() {
|
if build_file_path.is_file() {
|
||||||
|
@ -273,9 +290,80 @@ impl PublishCommand {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "wally-compat")]
|
if let Some(scripts) = scripts {
|
||||||
let mut has_wally = false;
|
for (name, path) in scripts {
|
||||||
let mut has_git = false;
|
let script_path = path.to_path(&canonical_package_dir);
|
||||||
|
|
||||||
|
let contents = match fs::read_to_string(&script_path).await {
|
||||||
|
Ok(contents) => contents,
|
||||||
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||||
|
anyhow::bail!("script {name} does not exist");
|
||||||
|
}
|
||||||
|
Err(e) if e.kind() == std::io::ErrorKind::IsADirectory => {
|
||||||
|
anyhow::bail!("script {name} must point to a file");
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
return Err(e).context(format!("failed to read script {name}"));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let script_path = script_path
|
||||||
|
.canonicalize()
|
||||||
|
.context(format!("failed to canonicalize script {name}"))?;
|
||||||
|
|
||||||
|
if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
|
||||||
|
errs.into_iter()
|
||||||
|
.map(|err| err.to_string())
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(", ")
|
||||||
|
}) {
|
||||||
|
anyhow::bail!("script {name} is not a valid Luau file: {err}");
|
||||||
|
}
|
||||||
|
|
||||||
|
if paths.insert(
|
||||||
|
script_path
|
||||||
|
.strip_prefix(&canonical_package_dir)
|
||||||
|
.unwrap()
|
||||||
|
.to_path_buf(),
|
||||||
|
) {
|
||||||
|
println!(
|
||||||
|
"{}: script {name} was not included, adding {path}",
|
||||||
|
"warn".yellow().bold()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for relative_path in &paths {
|
||||||
|
let path = project.package_dir().join(relative_path);
|
||||||
|
|
||||||
|
if !path.exists() {
|
||||||
|
anyhow::bail!("included file `{}` does not exist", path.display());
|
||||||
|
}
|
||||||
|
|
||||||
|
let file_name = relative_path
|
||||||
|
.file_name()
|
||||||
|
.context("failed to get file name")?
|
||||||
|
.to_string_lossy()
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
// it'll be included later after transformations, and is guaranteed to be a file
|
||||||
|
if file_name.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if path.is_file() {
|
||||||
|
archive
|
||||||
|
.append_file(
|
||||||
|
&relative_path,
|
||||||
|
fs::File::open(&path)
|
||||||
|
.await
|
||||||
|
.context(format!("failed to read `{}`", relative_path.display()))?
|
||||||
|
.file_mut(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
for specifier in manifest
|
for specifier in manifest
|
||||||
.dependencies
|
.dependencies
|
||||||
|
@ -300,8 +388,6 @@ impl PublishCommand {
|
||||||
}
|
}
|
||||||
#[cfg(feature = "wally-compat")]
|
#[cfg(feature = "wally-compat")]
|
||||||
DependencySpecifiers::Wally(specifier) => {
|
DependencySpecifiers::Wally(specifier) => {
|
||||||
has_wally = true;
|
|
||||||
|
|
||||||
let index_name = specifier
|
let index_name = specifier
|
||||||
.index
|
.index
|
||||||
.as_deref()
|
.as_deref()
|
||||||
|
@ -317,12 +403,11 @@ impl PublishCommand {
|
||||||
.to_string(),
|
.to_string(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
DependencySpecifiers::Git(_) => {
|
DependencySpecifiers::Git(_) => {}
|
||||||
has_git = true;
|
|
||||||
}
|
|
||||||
DependencySpecifiers::Workspace(spec) => {
|
DependencySpecifiers::Workspace(spec) => {
|
||||||
let pkg_ref = WorkspacePackageSource
|
let pkg_ref = WorkspacePackageSource
|
||||||
.resolve(spec, project, target_kind)
|
.resolve(spec, project, target_kind, &mut HashSet::new())
|
||||||
|
.await
|
||||||
.context("failed to resolve workspace package")?
|
.context("failed to resolve workspace package")?
|
||||||
.1
|
.1
|
||||||
.pop_last()
|
.pop_last()
|
||||||
|
@ -337,15 +422,19 @@ impl PublishCommand {
|
||||||
.context("failed to get workspace directory")?,
|
.context("failed to get workspace directory")?,
|
||||||
)
|
)
|
||||||
.join(MANIFEST_FILE_NAME);
|
.join(MANIFEST_FILE_NAME);
|
||||||
let manifest = std::fs::read_to_string(&manifest)
|
let manifest = fs::read_to_string(&manifest)
|
||||||
|
.await
|
||||||
.context("failed to read workspace package manifest")?;
|
.context("failed to read workspace package manifest")?;
|
||||||
let manifest = toml::from_str::<pesde::manifest::Manifest>(&manifest)
|
let manifest = toml::from_str::<pesde::manifest::Manifest>(&manifest)
|
||||||
.context("failed to parse workspace package manifest")?;
|
.context("failed to parse workspace package manifest")?;
|
||||||
|
|
||||||
*specifier = DependencySpecifiers::Pesde(PesdeDependencySpecifier {
|
*specifier = DependencySpecifiers::Pesde(PesdeDependencySpecifier {
|
||||||
name: spec.name.clone(),
|
name: spec.name.clone(),
|
||||||
version: match spec.version_type {
|
version: match spec.version.clone() {
|
||||||
VersionType::Wildcard => VersionReq::STAR,
|
VersionTypeOrReq::VersionType(VersionType::Wildcard) => {
|
||||||
|
VersionReq::STAR
|
||||||
|
}
|
||||||
|
VersionTypeOrReq::Req(r) => r,
|
||||||
v => VersionReq::parse(&format!("{v}{}", manifest.version))
|
v => VersionReq::parse(&format!("{v}{}", manifest.version))
|
||||||
.context(format!("failed to parse version for {v}"))?,
|
.context(format!("failed to parse version for {v}"))?,
|
||||||
},
|
},
|
||||||
|
@ -412,11 +501,25 @@ impl PublishCommand {
|
||||||
.bin_path()
|
.bin_path()
|
||||||
.map_or("(none)".to_string(), |p| p.to_string())
|
.map_or("(none)".to_string(), |p| p.to_string())
|
||||||
);
|
);
|
||||||
|
println!(
|
||||||
|
"\tscripts: {}",
|
||||||
|
manifest
|
||||||
|
.target
|
||||||
|
.scripts()
|
||||||
|
.filter(|s| !s.is_empty())
|
||||||
|
.map_or("(none)".to_string(), |s| {
|
||||||
|
s.keys().cloned().collect::<Vec<_>>().join(", ")
|
||||||
|
})
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
println!(
|
println!(
|
||||||
"includes: {}",
|
"includes: {}",
|
||||||
display_includes.into_iter().collect::<Vec<_>>().join(", ")
|
paths
|
||||||
|
.into_iter()
|
||||||
|
.map(|p| p.to_string_lossy().to_string())
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(", ")
|
||||||
);
|
);
|
||||||
|
|
||||||
if !self.dry_run
|
if !self.dry_run
|
||||||
|
@ -431,38 +534,53 @@ impl PublishCommand {
|
||||||
println!();
|
println!();
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut temp_manifest = tempfile().context("failed to create temp manifest file")?;
|
let temp_path = Builder::new().make(|_| Ok(()))?.into_temp_path();
|
||||||
|
let mut temp_manifest = fs::OpenOptions::new()
|
||||||
|
.create(true)
|
||||||
|
.write(true)
|
||||||
|
.truncate(true)
|
||||||
|
.read(true)
|
||||||
|
.open(temp_path.to_path_buf())
|
||||||
|
.await?;
|
||||||
|
|
||||||
temp_manifest
|
temp_manifest
|
||||||
.write_all(
|
.write_all(
|
||||||
toml::to_string(&manifest)
|
toml::to_string(&manifest)
|
||||||
.context("failed to serialize manifest")?
|
.context("failed to serialize manifest")?
|
||||||
.as_bytes(),
|
.as_bytes(),
|
||||||
)
|
)
|
||||||
|
.await
|
||||||
.context("failed to write temp manifest file")?;
|
.context("failed to write temp manifest file")?;
|
||||||
temp_manifest
|
temp_manifest
|
||||||
.rewind()
|
.rewind()
|
||||||
|
.await
|
||||||
.context("failed to rewind temp manifest file")?;
|
.context("failed to rewind temp manifest file")?;
|
||||||
|
|
||||||
archive.append_file(MANIFEST_FILE_NAME, &mut temp_manifest)?;
|
archive
|
||||||
|
.append_file(MANIFEST_FILE_NAME, temp_manifest.file_mut())
|
||||||
|
.await?;
|
||||||
|
|
||||||
let archive = archive
|
let mut encoder = archive
|
||||||
.into_inner()
|
.into_inner()
|
||||||
.context("failed to encode archive")?
|
.await
|
||||||
.finish()
|
.context("failed to finish archive")?;
|
||||||
.context("failed to get archive bytes")?;
|
encoder
|
||||||
|
.shutdown()
|
||||||
|
.await
|
||||||
|
.context("failed to finish archive")?;
|
||||||
|
let archive = encoder.into_inner();
|
||||||
|
|
||||||
let source = PesdePackageSource::new(
|
let index_url = manifest
|
||||||
manifest
|
.indices
|
||||||
.indices
|
.get(&self.index)
|
||||||
.get(DEFAULT_INDEX_NAME)
|
.context(format!("missing index {}", self.index))?;
|
||||||
.context("missing default index")?
|
let source = PesdePackageSource::new(index_url.clone());
|
||||||
.clone(),
|
PackageSource::refresh(&source, project)
|
||||||
);
|
.await
|
||||||
source
|
|
||||||
.refresh(project)
|
|
||||||
.context("failed to refresh source")?;
|
.context("failed to refresh source")?;
|
||||||
let config = source
|
let config = source
|
||||||
.config(project)
|
.config(project)
|
||||||
|
.await
|
||||||
.context("failed to get source config")?;
|
.context("failed to get source config")?;
|
||||||
|
|
||||||
if archive.len() > config.max_archive_size {
|
if archive.len() > config.max_archive_size {
|
||||||
|
@ -473,19 +591,27 @@ impl PublishCommand {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
manifest.all_dependencies().context("dependency conflict")?;
|
let deps = manifest.all_dependencies().context("dependency conflict")?;
|
||||||
|
|
||||||
if !config.git_allowed && has_git {
|
if let Some((disallowed, _)) = deps.iter().find(|(_, (spec, _))| match spec {
|
||||||
anyhow::bail!("git dependencies are not allowed on this index");
|
DependencySpecifiers::Pesde(spec) => {
|
||||||
}
|
!config.other_registries_allowed.is_allowed_or_same(
|
||||||
|
source.repo_url().clone(),
|
||||||
#[cfg(feature = "wally-compat")]
|
gix::Url::try_from(spec.index.as_deref().unwrap()).unwrap(),
|
||||||
if !config.wally_allowed && has_wally {
|
)
|
||||||
anyhow::bail!("wally dependencies are not allowed on this index");
|
}
|
||||||
|
DependencySpecifiers::Git(spec) => !config.git_allowed.is_allowed(spec.repo.clone()),
|
||||||
|
#[cfg(feature = "wally-compat")]
|
||||||
|
DependencySpecifiers::Wally(spec) => !config
|
||||||
|
.wally_allowed
|
||||||
|
.is_allowed(gix::Url::try_from(spec.index.as_deref().unwrap()).unwrap()),
|
||||||
|
_ => false,
|
||||||
|
}) {
|
||||||
|
anyhow::bail!("dependency `{disallowed}` is not allowed on this index");
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.dry_run {
|
if self.dry_run {
|
||||||
std::fs::write("package.tar.gz", archive)?;
|
fs::write("package.tar.gz", archive).await?;
|
||||||
|
|
||||||
println!(
|
println!(
|
||||||
"{}",
|
"{}",
|
||||||
|
@ -495,17 +621,22 @@ impl PublishCommand {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
let response = reqwest
|
let mut request = reqwest
|
||||||
.post(format!("{}/v0/packages", config.api()))
|
.post(format!("{}/v0/packages", config.api()))
|
||||||
.multipart(reqwest::blocking::multipart::Form::new().part(
|
.body(archive);
|
||||||
"tarball",
|
|
||||||
reqwest::blocking::multipart::Part::bytes(archive).file_name("package.tar.gz"),
|
if let Some(token) = project.auth_config().tokens().get(index_url) {
|
||||||
))
|
tracing::debug!("using token for {index_url}");
|
||||||
.send()
|
request = request.header(AUTHORIZATION, token);
|
||||||
.context("failed to send request")?;
|
}
|
||||||
|
|
||||||
|
let response = request.send().await.context("failed to send request")?;
|
||||||
|
|
||||||
let status = response.status();
|
let status = response.status();
|
||||||
let text = response.text().context("failed to get response text")?;
|
let text = response
|
||||||
|
.text()
|
||||||
|
.await
|
||||||
|
.context("failed to get response text")?;
|
||||||
match status {
|
match status {
|
||||||
StatusCode::CONFLICT => {
|
StatusCode::CONFLICT => {
|
||||||
println!("{}", "package version already exists".red().bold());
|
println!("{}", "package version already exists".red().bold());
|
||||||
|
@ -530,15 +661,20 @@ impl PublishCommand {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
|
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||||
let result = self.run_impl(&project, reqwest.clone());
|
let result = self.clone().run_impl(&project, reqwest.clone(), true).await;
|
||||||
if project.workspace_dir().is_some() {
|
if project.workspace_dir().is_some() {
|
||||||
return result;
|
return result;
|
||||||
} else if let Err(result) = result {
|
} else {
|
||||||
println!("an error occurred publishing workspace root: {result}");
|
display_err(result, " occurred publishing workspace root");
|
||||||
}
|
}
|
||||||
|
|
||||||
run_on_workspace_members(&project, |project| self.run_impl(&project, reqwest.clone()))
|
run_on_workspace_members(&project, |project| {
|
||||||
.map(|_| ())
|
let reqwest = reqwest.clone();
|
||||||
|
let this = self.clone();
|
||||||
|
async move { this.run_impl(&project, reqwest, false).await }
|
||||||
|
})
|
||||||
|
.await
|
||||||
|
.map(|_| ())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,14 +1,17 @@
|
||||||
use crate::cli::up_to_date_lockfile;
|
use crate::cli::up_to_date_lockfile;
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
|
use futures::{StreamExt, TryStreamExt};
|
||||||
use pesde::{
|
use pesde::{
|
||||||
linking::generator::generate_bin_linking_module,
|
linking::generator::generate_bin_linking_module,
|
||||||
names::{PackageName, PackageNames},
|
names::{PackageName, PackageNames},
|
||||||
source::traits::PackageRef,
|
Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
|
||||||
Project, PACKAGES_CONTAINER_NAME,
|
|
||||||
};
|
};
|
||||||
use relative_path::RelativePathBuf;
|
use relative_path::RelativePathBuf;
|
||||||
use std::{env::current_dir, ffi::OsString, io::Write, path::PathBuf, process::Command};
|
use std::{
|
||||||
|
collections::HashSet, env::current_dir, ffi::OsString, io::Write, path::PathBuf,
|
||||||
|
process::Command,
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct RunCommand {
|
pub struct RunCommand {
|
||||||
|
@ -22,14 +25,14 @@ pub struct RunCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RunCommand {
|
impl RunCommand {
|
||||||
pub fn run(self, project: Project) -> anyhow::Result<()> {
|
pub async fn run(self, project: Project) -> anyhow::Result<()> {
|
||||||
let run = |path: PathBuf| {
|
let run = |root: PathBuf, file_path: PathBuf| {
|
||||||
let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
|
let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
|
||||||
caller
|
caller
|
||||||
.write_all(
|
.write_all(
|
||||||
generate_bin_linking_module(
|
generate_bin_linking_module(
|
||||||
project.package_dir(),
|
root,
|
||||||
&format!("{:?}", path.to_string_lossy()),
|
&format!("{:?}", file_path.to_string_lossy()),
|
||||||
)
|
)
|
||||||
.as_bytes(),
|
.as_bytes(),
|
||||||
)
|
)
|
||||||
|
@ -49,19 +52,20 @@ impl RunCommand {
|
||||||
std::process::exit(status.code().unwrap_or(1))
|
std::process::exit(status.code().unwrap_or(1))
|
||||||
};
|
};
|
||||||
|
|
||||||
let package_or_script = match self.package_or_script {
|
let Some(package_or_script) = self.package_or_script else {
|
||||||
Some(package_or_script) => package_or_script,
|
if let Some(script_path) = project.deser_manifest().await?.target.bin_path() {
|
||||||
None => {
|
run(
|
||||||
if let Some(script_path) = project.deser_manifest()?.target.bin_path() {
|
project.package_dir().to_owned(),
|
||||||
run(script_path.to_path(project.package_dir()));
|
script_path.to_path(project.package_dir()),
|
||||||
}
|
);
|
||||||
|
return Ok(());
|
||||||
anyhow::bail!("no package or script specified")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
anyhow::bail!("no package or script specified, and no bin path found in manifest")
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Ok(pkg_name) = package_or_script.parse::<PackageName>() {
|
if let Ok(pkg_name) = package_or_script.parse::<PackageName>() {
|
||||||
let graph = if let Some(lockfile) = up_to_date_lockfile(&project)? {
|
let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
|
||||||
lockfile.graph
|
lockfile.graph
|
||||||
} else {
|
} else {
|
||||||
anyhow::bail!("outdated lockfile, please run the install command first")
|
anyhow::bail!("outdated lockfile, please run the install command first")
|
||||||
|
@ -79,10 +83,11 @@ impl RunCommand {
|
||||||
};
|
};
|
||||||
|
|
||||||
let base_folder = project
|
let base_folder = project
|
||||||
.deser_manifest()?
|
.deser_manifest()
|
||||||
|
.await?
|
||||||
.target
|
.target
|
||||||
.kind()
|
.kind()
|
||||||
.packages_folder(&node.node.pkg_ref.target_kind());
|
.packages_folder(version_id.target());
|
||||||
let container_folder = node.node.container_folder(
|
let container_folder = node.node.container_folder(
|
||||||
&project
|
&project
|
||||||
.package_dir()
|
.package_dir()
|
||||||
|
@ -92,13 +97,20 @@ impl RunCommand {
|
||||||
version_id.version(),
|
version_id.version(),
|
||||||
);
|
);
|
||||||
|
|
||||||
run(bin_path.to_path(&container_folder))
|
let path = bin_path.to_path(&container_folder);
|
||||||
|
|
||||||
|
run(path.clone(), path);
|
||||||
|
return Ok(());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Ok(manifest) = project.deser_manifest() {
|
if let Ok(manifest) = project.deser_manifest().await {
|
||||||
if let Some(script_path) = manifest.scripts.get(&package_or_script) {
|
if let Some(script_path) = manifest.scripts.get(&package_or_script) {
|
||||||
run(script_path.to_path(project.package_dir()))
|
run(
|
||||||
|
project.package_dir().to_path_buf(),
|
||||||
|
script_path.to_path(project.package_dir()),
|
||||||
|
);
|
||||||
|
return Ok(());
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -106,10 +118,59 @@ impl RunCommand {
|
||||||
let path = relative_path.to_path(project.package_dir());
|
let path = relative_path.to_path(project.package_dir());
|
||||||
|
|
||||||
if !path.exists() {
|
if !path.exists() {
|
||||||
anyhow::bail!("path does not exist: {}", path.display());
|
anyhow::bail!("path `{}` does not exist", path.display());
|
||||||
}
|
}
|
||||||
|
|
||||||
run(path);
|
let workspace_dir = project
|
||||||
|
.workspace_dir()
|
||||||
|
.unwrap_or_else(|| project.package_dir());
|
||||||
|
|
||||||
|
let members = match project.workspace_members(workspace_dir, false).await {
|
||||||
|
Ok(members) => members.boxed(),
|
||||||
|
Err(pesde::errors::WorkspaceMembersError::ManifestMissing(e))
|
||||||
|
if e.kind() == std::io::ErrorKind::NotFound =>
|
||||||
|
{
|
||||||
|
futures::stream::empty().boxed()
|
||||||
|
}
|
||||||
|
Err(e) => Err(e).context("failed to get workspace members")?,
|
||||||
|
};
|
||||||
|
|
||||||
|
let members = members
|
||||||
|
.map(|res| {
|
||||||
|
res.map_err(anyhow::Error::from)
|
||||||
|
.and_then(|(path, _)| path.canonicalize().map_err(Into::into))
|
||||||
|
})
|
||||||
|
.chain(futures::stream::once(async {
|
||||||
|
workspace_dir.canonicalize().map_err(Into::into)
|
||||||
|
}))
|
||||||
|
.try_collect::<HashSet<_>>()
|
||||||
|
.await
|
||||||
|
.context("failed to collect workspace members")?;
|
||||||
|
|
||||||
|
let root = 'finder: {
|
||||||
|
let mut current_path = path.to_path_buf();
|
||||||
|
loop {
|
||||||
|
let canonical_path = current_path
|
||||||
|
.canonicalize()
|
||||||
|
.context("failed to canonicalize parent")?;
|
||||||
|
|
||||||
|
if members.contains(&canonical_path)
|
||||||
|
&& canonical_path.join(MANIFEST_FILE_NAME).exists()
|
||||||
|
{
|
||||||
|
break 'finder canonical_path;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(parent) = current_path.parent() {
|
||||||
|
current_path = parent.to_path_buf();
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
project.package_dir().to_path_buf()
|
||||||
|
};
|
||||||
|
|
||||||
|
run(root, path);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,8 @@
|
||||||
use crate::cli::{version::update_bin_exe, HOME_DIR};
|
use crate::cli::{version::update_bin_exe, HOME_DIR};
|
||||||
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
|
use std::env::current_exe;
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct SelfInstallCommand {
|
pub struct SelfInstallCommand {
|
||||||
/// Skip adding the bin directory to the PATH
|
/// Skip adding the bin directory to the PATH
|
||||||
|
@ -10,7 +12,7 @@ pub struct SelfInstallCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SelfInstallCommand {
|
impl SelfInstallCommand {
|
||||||
pub fn run(self) -> anyhow::Result<()> {
|
pub async fn run(self) -> anyhow::Result<()> {
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
{
|
{
|
||||||
if !self.skip_add_to_path {
|
if !self.skip_add_to_path {
|
||||||
|
@ -24,7 +26,7 @@ impl SelfInstallCommand {
|
||||||
.0;
|
.0;
|
||||||
let path: String = env.get_value("Path").context("failed to get Path value")?;
|
let path: String = env.get_value("Path").context("failed to get Path value")?;
|
||||||
|
|
||||||
let bin_dir = crate::cli::bin_dir()?;
|
let bin_dir = crate::cli::bin_dir().await?;
|
||||||
let bin_dir = bin_dir.to_string_lossy();
|
let bin_dir = bin_dir.to_string_lossy();
|
||||||
|
|
||||||
let exists = path.split(';').any(|part| *part == bin_dir);
|
let exists = path.split(';').any(|part| *part == bin_dir);
|
||||||
|
@ -68,7 +70,7 @@ and then restart your shell.
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
update_bin_exe()?;
|
update_bin_exe(¤t_exe().context("failed to get current exe path")?).await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,17 +1,57 @@
|
||||||
use crate::cli::{config::read_config, version::get_or_download_version};
|
use crate::cli::{
|
||||||
|
config::read_config,
|
||||||
|
version::{
|
||||||
|
current_version, get_or_download_version, get_remote_version, no_build_metadata,
|
||||||
|
update_bin_exe, TagInfo, VersionType,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
|
use colored::Colorize;
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct SelfUpgradeCommand {}
|
pub struct SelfUpgradeCommand {
|
||||||
|
/// Whether to use the version from the "upgrades available" message
|
||||||
|
#[clap(long, default_value_t = false)]
|
||||||
|
use_cached: bool,
|
||||||
|
}
|
||||||
|
|
||||||
impl SelfUpgradeCommand {
|
impl SelfUpgradeCommand {
|
||||||
pub fn run(self, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
|
pub async fn run(self, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||||
let config = read_config()?;
|
let latest_version = if self.use_cached {
|
||||||
|
read_config()
|
||||||
|
.await?
|
||||||
|
.last_checked_updates
|
||||||
|
.context("no cached version found")?
|
||||||
|
.1
|
||||||
|
} else {
|
||||||
|
get_remote_version(&reqwest, VersionType::Latest).await?
|
||||||
|
};
|
||||||
|
|
||||||
get_or_download_version(&reqwest, &config.last_checked_updates.unwrap().1)?;
|
let latest_version_no_metadata = no_build_metadata(&latest_version);
|
||||||
// a call to `update_bin_exe` or other similar function *should* be here, in case new versions
|
|
||||||
// have fixes to bugs in executing other versions, but that would cause
|
if latest_version_no_metadata <= current_version() {
|
||||||
// the current file to be overwritten by itself, so this needs more thought
|
println!("already up to date");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let display_latest_version = latest_version_no_metadata.to_string().yellow().bold();
|
||||||
|
|
||||||
|
if !inquire::prompt_confirmation(format!(
|
||||||
|
"are you sure you want to upgrade {} from {} to {display_latest_version}?",
|
||||||
|
env!("CARGO_BIN_NAME").cyan(),
|
||||||
|
env!("CARGO_PKG_VERSION").yellow().bold()
|
||||||
|
))? {
|
||||||
|
println!("cancelled upgrade");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let path = get_or_download_version(&reqwest, &TagInfo::Complete(latest_version), true)
|
||||||
|
.await?
|
||||||
|
.unwrap();
|
||||||
|
update_bin_exe(&path).await?;
|
||||||
|
|
||||||
|
println!("upgraded to version {display_latest_version}!");
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,29 +1,21 @@
|
||||||
use crate::cli::{download_graph, run_on_workspace_members};
|
use crate::cli::{progress_bar, run_on_workspace_members};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
use indicatif::MultiProgress;
|
|
||||||
use pesde::{lockfile::Lockfile, Project};
|
use pesde::{lockfile::Lockfile, Project};
|
||||||
use std::collections::HashSet;
|
use std::{collections::HashSet, sync::Arc};
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
#[derive(Debug, Args, Copy, Clone)]
|
#[derive(Debug, Args, Copy, Clone)]
|
||||||
pub struct UpdateCommand {
|
pub struct UpdateCommand {}
|
||||||
/// The amount of threads to use for downloading
|
|
||||||
#[arg(short, long, default_value_t = 6, value_parser = clap::value_parser!(u64).range(1..=128))]
|
|
||||||
threads: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl UpdateCommand {
|
impl UpdateCommand {
|
||||||
pub fn run(
|
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||||
self,
|
|
||||||
project: Project,
|
|
||||||
multi: MultiProgress,
|
|
||||||
reqwest: reqwest::blocking::Client,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
let mut refreshed_sources = HashSet::new();
|
let mut refreshed_sources = HashSet::new();
|
||||||
|
|
||||||
let manifest = project
|
let manifest = project
|
||||||
.deser_manifest()
|
.deser_manifest()
|
||||||
|
.await
|
||||||
.context("failed to read manifest")?;
|
.context("failed to read manifest")?;
|
||||||
|
|
||||||
println!(
|
println!(
|
||||||
|
@ -34,8 +26,10 @@ impl UpdateCommand {
|
||||||
);
|
);
|
||||||
|
|
||||||
let graph = project
|
let graph = project
|
||||||
.dependency_graph(None, &mut refreshed_sources)
|
.dependency_graph(None, &mut refreshed_sources, false)
|
||||||
|
.await
|
||||||
.context("failed to build dependency graph")?;
|
.context("failed to build dependency graph")?;
|
||||||
|
let graph = Arc::new(graph);
|
||||||
|
|
||||||
project
|
project
|
||||||
.write_lockfile(Lockfile {
|
.write_lockfile(Lockfile {
|
||||||
|
@ -44,25 +38,48 @@ impl UpdateCommand {
|
||||||
target: manifest.target.kind(),
|
target: manifest.target.kind(),
|
||||||
overrides: manifest.overrides,
|
overrides: manifest.overrides,
|
||||||
|
|
||||||
graph: download_graph(
|
graph: {
|
||||||
&project,
|
let (rx, downloaded_graph) = project
|
||||||
&mut refreshed_sources,
|
.download_and_link(
|
||||||
&graph,
|
&graph,
|
||||||
&multi,
|
&Arc::new(Mutex::new(refreshed_sources)),
|
||||||
&reqwest,
|
&reqwest,
|
||||||
self.threads as usize,
|
false,
|
||||||
false,
|
false,
|
||||||
false,
|
|_| async { Ok::<_, std::io::Error>(()) },
|
||||||
"📥 downloading dependencies".to_string(),
|
)
|
||||||
"📥 downloaded dependencies".to_string(),
|
.await
|
||||||
)?,
|
.context("failed to download dependencies")?;
|
||||||
|
|
||||||
|
progress_bar(
|
||||||
|
graph.values().map(|versions| versions.len() as u64).sum(),
|
||||||
|
rx,
|
||||||
|
"📥 ".to_string(),
|
||||||
|
"downloading dependencies".to_string(),
|
||||||
|
"downloaded dependencies".to_string(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
downloaded_graph
|
||||||
|
.await
|
||||||
|
.context("failed to download dependencies")?
|
||||||
|
},
|
||||||
|
|
||||||
workspace: run_on_workspace_members(&project, |project| {
|
workspace: run_on_workspace_members(&project, |project| {
|
||||||
self.run(project, multi.clone(), reqwest.clone())
|
let reqwest = reqwest.clone();
|
||||||
})?,
|
async move { Box::pin(self.run(project, reqwest)).await }
|
||||||
|
})
|
||||||
|
.await?,
|
||||||
})
|
})
|
||||||
|
.await
|
||||||
.context("failed to write lockfile")?;
|
.context("failed to write lockfile")?;
|
||||||
|
|
||||||
|
println!(
|
||||||
|
"\n\n{}. run `{} install` in order to install the new dependencies",
|
||||||
|
"✅ done".green(),
|
||||||
|
env!("CARGO_BIN_NAME")
|
||||||
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,32 +1,19 @@
|
||||||
use std::collections::BTreeMap;
|
use crate::cli::{auth::Tokens, home_dir};
|
||||||
|
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
|
use fs_err::tokio as fs;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
use tracing::instrument;
|
||||||
use crate::cli::home_dir;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
#[serde(default)]
|
||||||
pub struct CliConfig {
|
pub struct CliConfig {
|
||||||
#[serde(
|
#[serde(
|
||||||
serialize_with = "crate::util::serialize_gix_url",
|
serialize_with = "crate::util::serialize_gix_url",
|
||||||
deserialize_with = "crate::util::deserialize_gix_url"
|
deserialize_with = "crate::util::deserialize_gix_url"
|
||||||
)]
|
)]
|
||||||
pub default_index: gix::Url,
|
pub default_index: gix::Url,
|
||||||
#[serde(
|
|
||||||
serialize_with = "crate::util::serialize_gix_url",
|
|
||||||
deserialize_with = "crate::util::deserialize_gix_url"
|
|
||||||
)]
|
|
||||||
pub scripts_repo: gix::Url,
|
|
||||||
|
|
||||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
pub tokens: Tokens,
|
||||||
pub token: Option<String>,
|
|
||||||
#[serde(
|
|
||||||
default,
|
|
||||||
skip_serializing_if = "BTreeMap::is_empty",
|
|
||||||
serialize_with = "crate::cli::serialize_string_url_map",
|
|
||||||
deserialize_with = "crate::cli::deserialize_string_url_map"
|
|
||||||
)]
|
|
||||||
pub token_overrides: BTreeMap<gix::Url, String>,
|
|
||||||
|
|
||||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
pub last_checked_updates: Option<(chrono::DateTime<chrono::Utc>, semver::Version)>,
|
pub last_checked_updates: Option<(chrono::DateTime<chrono::Utc>, semver::Version)>,
|
||||||
|
@ -35,23 +22,18 @@ pub struct CliConfig {
|
||||||
impl Default for CliConfig {
|
impl Default for CliConfig {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
default_index: "https://github.com/daimond113/pesde-index"
|
default_index: "https://github.com/pesde-pkg/index".try_into().unwrap(),
|
||||||
.try_into()
|
|
||||||
.unwrap(),
|
|
||||||
scripts_repo: "https://github.com/daimond113/pesde-scripts"
|
|
||||||
.try_into()
|
|
||||||
.unwrap(),
|
|
||||||
|
|
||||||
token: None,
|
tokens: Tokens(Default::default()),
|
||||||
token_overrides: Default::default(),
|
|
||||||
|
|
||||||
last_checked_updates: None,
|
last_checked_updates: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn read_config() -> anyhow::Result<CliConfig> {
|
#[instrument(level = "trace")]
|
||||||
let config_string = match std::fs::read_to_string(home_dir()?.join("config.toml")) {
|
pub async fn read_config() -> anyhow::Result<CliConfig> {
|
||||||
|
let config_string = match fs::read_to_string(home_dir()?.join("config.toml")).await {
|
||||||
Ok(config_string) => config_string,
|
Ok(config_string) => config_string,
|
||||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||||
return Ok(CliConfig::default());
|
return Ok(CliConfig::default());
|
||||||
|
@ -64,9 +46,11 @@ pub fn read_config() -> anyhow::Result<CliConfig> {
|
||||||
Ok(config)
|
Ok(config)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write_config(config: &CliConfig) -> anyhow::Result<()> {
|
#[instrument(level = "trace")]
|
||||||
|
pub async fn write_config(config: &CliConfig) -> anyhow::Result<()> {
|
||||||
let config_string = toml::to_string(config).context("failed to serialize config")?;
|
let config_string = toml::to_string(config).context("failed to serialize config")?;
|
||||||
std::fs::write(home_dir()?.join("config.toml"), config_string)
|
fs::write(home_dir()?.join("config.toml"), config_string)
|
||||||
|
.await
|
||||||
.context("failed to write config file")?;
|
.context("failed to write config file")?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
|
@ -1,16 +1,19 @@
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
pub fn make_executable<P: AsRef<Path>>(_path: P) -> anyhow::Result<()> {
|
pub async fn make_executable<P: AsRef<Path>>(_path: P) -> anyhow::Result<()> {
|
||||||
#[cfg(unix)]
|
#[cfg(unix)]
|
||||||
{
|
{
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
|
use fs_err::tokio as fs;
|
||||||
use std::os::unix::fs::PermissionsExt;
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
|
||||||
let mut perms = std::fs::metadata(&_path)
|
let mut perms = fs::metadata(&_path)
|
||||||
|
.await
|
||||||
.context("failed to get bin link file metadata")?
|
.context("failed to get bin link file metadata")?
|
||||||
.permissions();
|
.permissions();
|
||||||
perms.set_mode(perms.mode() | 0o111);
|
perms.set_mode(perms.mode() | 0o111);
|
||||||
std::fs::set_permissions(&_path, perms)
|
fs::set_permissions(&_path, perms)
|
||||||
|
.await
|
||||||
.context("failed to set bin link file permissions")?;
|
.context("failed to set bin link file permissions")?;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
221
src/cli/mod.rs
|
@ -1,30 +1,29 @@
|
||||||
use crate::cli::auth::get_token;
|
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use gix::bstr::BStr;
|
use colored::Colorize;
|
||||||
use indicatif::MultiProgress;
|
use fs_err::tokio as fs;
|
||||||
|
use futures::StreamExt;
|
||||||
use pesde::{
|
use pesde::{
|
||||||
lockfile::{DependencyGraph, DownloadedGraph, Lockfile},
|
lockfile::Lockfile,
|
||||||
manifest::target::TargetKind,
|
manifest::target::TargetKind,
|
||||||
names::{PackageName, PackageNames},
|
names::{PackageName, PackageNames},
|
||||||
source::{version_id::VersionId, workspace::specifier::VersionType, PackageSources},
|
source::{version_id::VersionId, workspace::specifier::VersionTypeOrReq},
|
||||||
Project,
|
Project,
|
||||||
};
|
};
|
||||||
use relative_path::RelativePathBuf;
|
use relative_path::RelativePathBuf;
|
||||||
use serde::{ser::SerializeMap, Deserialize, Deserializer, Serializer};
|
|
||||||
use std::{
|
use std::{
|
||||||
collections::{BTreeMap, HashSet},
|
collections::{BTreeMap, HashSet},
|
||||||
fs::create_dir_all,
|
future::Future,
|
||||||
path::PathBuf,
|
path::PathBuf,
|
||||||
str::FromStr,
|
str::FromStr,
|
||||||
sync::Arc,
|
|
||||||
time::Duration,
|
time::Duration,
|
||||||
};
|
};
|
||||||
|
use tokio::pin;
|
||||||
|
use tracing::instrument;
|
||||||
|
|
||||||
pub mod auth;
|
pub mod auth;
|
||||||
pub mod commands;
|
pub mod commands;
|
||||||
pub mod config;
|
pub mod config;
|
||||||
pub mod files;
|
pub mod files;
|
||||||
pub mod repos;
|
|
||||||
#[cfg(feature = "version-management")]
|
#[cfg(feature = "version-management")]
|
||||||
pub mod version;
|
pub mod version;
|
||||||
|
|
||||||
|
@ -36,15 +35,18 @@ pub fn home_dir() -> anyhow::Result<PathBuf> {
|
||||||
.join(HOME_DIR))
|
.join(HOME_DIR))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn bin_dir() -> anyhow::Result<PathBuf> {
|
pub async fn bin_dir() -> anyhow::Result<PathBuf> {
|
||||||
let bin_dir = home_dir()?.join("bin");
|
let bin_dir = home_dir()?.join("bin");
|
||||||
create_dir_all(&bin_dir).context("failed to create bin folder")?;
|
fs::create_dir_all(&bin_dir)
|
||||||
|
.await
|
||||||
|
.context("failed to create bin folder")?;
|
||||||
Ok(bin_dir)
|
Ok(bin_dir)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
|
#[instrument(skip(project), ret(level = "trace"), level = "debug")]
|
||||||
let manifest = project.deser_manifest()?;
|
pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
|
||||||
let lockfile = match project.deser_lockfile() {
|
let manifest = project.deser_manifest().await?;
|
||||||
|
let lockfile = match project.deser_lockfile().await {
|
||||||
Ok(lockfile) => lockfile,
|
Ok(lockfile) => lockfile,
|
||||||
Err(pesde::errors::LockfileReadError::Io(e))
|
Err(pesde::errors::LockfileReadError::Io(e))
|
||||||
if e.kind() == std::io::ErrorKind::NotFound =>
|
if e.kind() == std::io::ErrorKind::NotFound =>
|
||||||
|
@ -55,17 +57,17 @@ pub fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>
|
||||||
};
|
};
|
||||||
|
|
||||||
if manifest.overrides != lockfile.overrides {
|
if manifest.overrides != lockfile.overrides {
|
||||||
log::debug!("overrides are different");
|
tracing::debug!("overrides are different");
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
|
|
||||||
if manifest.target.kind() != lockfile.target {
|
if manifest.target.kind() != lockfile.target {
|
||||||
log::debug!("target kind is different");
|
tracing::debug!("target kind is different");
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
|
|
||||||
if manifest.name != lockfile.name || manifest.version != lockfile.version {
|
if manifest.name != lockfile.name || manifest.version != lockfile.version {
|
||||||
log::debug!("name or version is different");
|
tracing::debug!("name or version is different");
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -77,7 +79,7 @@ pub fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>
|
||||||
node.node
|
node.node
|
||||||
.direct
|
.direct
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.map(|(_, spec)| (spec, node.node.ty))
|
.map(|(_, spec, source_ty)| (spec, source_ty))
|
||||||
})
|
})
|
||||||
.collect::<HashSet<_>>();
|
.collect::<HashSet<_>>();
|
||||||
|
|
||||||
|
@ -85,9 +87,9 @@ pub fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>
|
||||||
.all_dependencies()
|
.all_dependencies()
|
||||||
.context("failed to get all dependencies")?
|
.context("failed to get all dependencies")?
|
||||||
.iter()
|
.iter()
|
||||||
.all(|(_, (spec, ty))| specs.contains(&(spec, *ty)));
|
.all(|(_, (spec, ty))| specs.contains(&(spec, ty)));
|
||||||
|
|
||||||
log::debug!("dependencies are the same: {same_dependencies}");
|
tracing::debug!("dependencies are the same: {same_dependencies}");
|
||||||
|
|
||||||
Ok(if same_dependencies {
|
Ok(if same_dependencies {
|
||||||
Some(lockfile)
|
Some(lockfile)
|
||||||
|
@ -122,14 +124,17 @@ impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<a
|
||||||
|
|
||||||
impl VersionedPackageName {
|
impl VersionedPackageName {
|
||||||
#[cfg(feature = "patches")]
|
#[cfg(feature = "patches")]
|
||||||
fn get(self, graph: &DownloadedGraph) -> anyhow::Result<(PackageNames, VersionId)> {
|
fn get(
|
||||||
|
self,
|
||||||
|
graph: &pesde::lockfile::DownloadedGraph,
|
||||||
|
) -> anyhow::Result<(PackageNames, VersionId)> {
|
||||||
let version_id = match self.1 {
|
let version_id = match self.1 {
|
||||||
Some(version) => version,
|
Some(version) => version,
|
||||||
None => {
|
None => {
|
||||||
let versions = graph.get(&self.0).context("package not found in graph")?;
|
let versions = graph.get(&self.0).context("package not found in graph")?;
|
||||||
if versions.len() == 1 {
|
if versions.len() == 1 {
|
||||||
let version = versions.keys().next().unwrap().clone();
|
let version = versions.keys().next().unwrap().clone();
|
||||||
log::debug!("only one version found, using {version}");
|
tracing::debug!("only one version found, using {version}");
|
||||||
version
|
version
|
||||||
} else {
|
} else {
|
||||||
anyhow::bail!(
|
anyhow::bail!(
|
||||||
|
@ -152,7 +157,7 @@ impl VersionedPackageName {
|
||||||
enum AnyPackageIdentifier<V: FromStr = VersionId, N: FromStr = PackageNames> {
|
enum AnyPackageIdentifier<V: FromStr = VersionId, N: FromStr = PackageNames> {
|
||||||
PackageName(VersionedPackageName<V, N>),
|
PackageName(VersionedPackageName<V, N>),
|
||||||
Url((gix::Url, String)),
|
Url((gix::Url, String)),
|
||||||
Workspace(VersionedPackageName<VersionType, PackageName>),
|
Workspace(VersionedPackageName<VersionTypeOrReq, PackageName>),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<anyhow::Error>>
|
impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<anyhow::Error>>
|
||||||
|
@ -163,7 +168,7 @@ impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<a
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
if let Some(s) = s.strip_prefix("gh#") {
|
if let Some(s) = s.strip_prefix("gh#") {
|
||||||
let s = format!("https://github.com/{s}");
|
let s = format!("https://github.com/{s}");
|
||||||
let (repo, rev) = s.split_once('#').unwrap();
|
let (repo, rev) = s.split_once('#').context("missing revision")?;
|
||||||
|
|
||||||
Ok(AnyPackageIdentifier::Url((
|
Ok(AnyPackageIdentifier::Url((
|
||||||
repo.try_into()?,
|
repo.try_into()?,
|
||||||
|
@ -172,7 +177,7 @@ impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<a
|
||||||
} else if let Some(rest) = s.strip_prefix("workspace:") {
|
} else if let Some(rest) = s.strip_prefix("workspace:") {
|
||||||
Ok(AnyPackageIdentifier::Workspace(rest.parse()?))
|
Ok(AnyPackageIdentifier::Workspace(rest.parse()?))
|
||||||
} else if s.contains(':') {
|
} else if s.contains(':') {
|
||||||
let (url, rev) = s.split_once('#').unwrap();
|
let (url, rev) = s.split_once('#').context("missing revision")?;
|
||||||
|
|
||||||
Ok(AnyPackageIdentifier::Url((
|
Ok(AnyPackageIdentifier::Url((
|
||||||
url.try_into()?,
|
url.try_into()?,
|
||||||
|
@ -188,72 +193,37 @@ pub fn parse_gix_url(s: &str) -> Result<gix::Url, gix::url::parse::Error> {
|
||||||
s.try_into()
|
s.try_into()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize_string_url_map<S: Serializer>(
|
pub async fn progress_bar<E: std::error::Error + Into<anyhow::Error>>(
|
||||||
url: &BTreeMap<gix::Url, String>,
|
len: u64,
|
||||||
serializer: S,
|
mut rx: tokio::sync::mpsc::Receiver<Result<String, E>>,
|
||||||
) -> Result<S::Ok, S::Error> {
|
prefix: String,
|
||||||
let mut map = serializer.serialize_map(Some(url.len()))?;
|
|
||||||
for (k, v) in url {
|
|
||||||
map.serialize_entry(&k.to_bstring().to_string(), v)?;
|
|
||||||
}
|
|
||||||
map.end()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize_string_url_map<'de, D: Deserializer<'de>>(
|
|
||||||
deserializer: D,
|
|
||||||
) -> Result<BTreeMap<gix::Url, String>, D::Error> {
|
|
||||||
BTreeMap::<String, String>::deserialize(deserializer)?
|
|
||||||
.into_iter()
|
|
||||||
.map(|(k, v)| {
|
|
||||||
gix::Url::from_bytes(BStr::new(&k))
|
|
||||||
.map(|k| (k, v))
|
|
||||||
.map_err(serde::de::Error::custom)
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
|
||||||
pub fn download_graph(
|
|
||||||
project: &Project,
|
|
||||||
refreshed_sources: &mut HashSet<PackageSources>,
|
|
||||||
graph: &DependencyGraph,
|
|
||||||
multi: &MultiProgress,
|
|
||||||
reqwest: &reqwest::blocking::Client,
|
|
||||||
threads: usize,
|
|
||||||
prod: bool,
|
|
||||||
write: bool,
|
|
||||||
progress_msg: String,
|
progress_msg: String,
|
||||||
finish_msg: String,
|
finish_msg: String,
|
||||||
) -> anyhow::Result<DownloadedGraph> {
|
) -> anyhow::Result<()> {
|
||||||
let bar = multi.add(
|
let bar = indicatif::ProgressBar::new(len)
|
||||||
indicatif::ProgressBar::new(graph.values().map(|versions| versions.len() as u64).sum())
|
.with_style(
|
||||||
.with_style(
|
indicatif::ProgressStyle::default_bar()
|
||||||
indicatif::ProgressStyle::default_bar()
|
.template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
|
||||||
.template("{msg} {bar:40.208/166} {pos}/{len} {percent}% {elapsed_precise}")?,
|
.progress_chars("█▓▒░ "),
|
||||||
)
|
)
|
||||||
.with_message(progress_msg),
|
.with_prefix(prefix)
|
||||||
);
|
.with_message(progress_msg);
|
||||||
bar.enable_steady_tick(Duration::from_millis(100));
|
bar.enable_steady_tick(Duration::from_millis(100));
|
||||||
|
|
||||||
let (rx, downloaded_graph) = project
|
while let Some(result) = rx.recv().await {
|
||||||
.download_graph(graph, refreshed_sources, reqwest, threads, prod, write)
|
|
||||||
.context("failed to download dependencies")?;
|
|
||||||
|
|
||||||
while let Ok(result) = rx.recv() {
|
|
||||||
bar.inc(1);
|
bar.inc(1);
|
||||||
|
|
||||||
match result {
|
match result {
|
||||||
Ok(()) => {}
|
Ok(text) => {
|
||||||
|
bar.set_message(text);
|
||||||
|
}
|
||||||
Err(e) => return Err(e.into()),
|
Err(e) => return Err(e.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
bar.finish_with_message(finish_msg);
|
bar.finish_with_message(finish_msg);
|
||||||
|
|
||||||
Ok(Arc::into_inner(downloaded_graph)
|
Ok(())
|
||||||
.unwrap()
|
|
||||||
.into_inner()
|
|
||||||
.unwrap())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn shift_project_dir(project: &Project, pkg_dir: PathBuf) -> Project {
|
pub fn shift_project_dir(project: &Project, pkg_dir: PathBuf) -> Project {
|
||||||
|
@ -266,41 +236,68 @@ pub fn shift_project_dir(project: &Project, pkg_dir: PathBuf) -> Project {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn run_on_workspace_members(
|
pub async fn run_on_workspace_members<F: Future<Output = anyhow::Result<()>>>(
|
||||||
project: &Project,
|
project: &Project,
|
||||||
f: impl Fn(Project) -> anyhow::Result<()>,
|
f: impl Fn(Project) -> F,
|
||||||
) -> anyhow::Result<BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>> {
|
) -> anyhow::Result<BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>> {
|
||||||
Ok(match project.workspace_dir() {
|
// this might seem counterintuitive, but remember that
|
||||||
Some(_) => {
|
// the presence of a workspace dir means that this project is a member of one
|
||||||
// this might seem counterintuitive, but remember that
|
if project.workspace_dir().is_some() {
|
||||||
// the presence of a workspace dir means that this project is a member of one
|
return Ok(Default::default());
|
||||||
Default::default()
|
}
|
||||||
|
|
||||||
|
let members_future = project
|
||||||
|
.workspace_members(project.package_dir(), true)
|
||||||
|
.await?;
|
||||||
|
pin!(members_future);
|
||||||
|
|
||||||
|
let mut results = BTreeMap::<PackageName, BTreeMap<TargetKind, RelativePathBuf>>::new();
|
||||||
|
|
||||||
|
while let Some((path, manifest)) = members_future.next().await.transpose()? {
|
||||||
|
let relative_path =
|
||||||
|
RelativePathBuf::from_path(path.strip_prefix(project.package_dir()).unwrap()).unwrap();
|
||||||
|
|
||||||
|
// don't run on the current workspace root
|
||||||
|
if relative_path != "" {
|
||||||
|
f(shift_project_dir(project, path)).await?;
|
||||||
}
|
}
|
||||||
None => project
|
|
||||||
.workspace_members(project.package_dir())
|
results
|
||||||
.context("failed to get workspace members")?
|
.entry(manifest.name)
|
||||||
.into_iter()
|
.or_default()
|
||||||
.map(|(path, manifest)| {
|
.insert(manifest.target.kind(), relative_path);
|
||||||
(
|
}
|
||||||
manifest.name,
|
|
||||||
manifest.target.kind(),
|
Ok(results)
|
||||||
RelativePathBuf::from_path(path.strip_prefix(project.package_dir()).unwrap())
|
}
|
||||||
.unwrap(),
|
|
||||||
)
|
pub fn display_err(result: anyhow::Result<()>, prefix: &str) {
|
||||||
})
|
if let Err(err) = result {
|
||||||
.map(|(name, target, path)| {
|
eprintln!("{}: {err}\n", format!("error{prefix}").red().bold());
|
||||||
f(shift_project_dir(
|
|
||||||
project,
|
let cause = err.chain().skip(1).collect::<Vec<_>>();
|
||||||
path.to_path(project.package_dir()),
|
|
||||||
))
|
if !cause.is_empty() {
|
||||||
.map(|_| (name, target, path))
|
eprintln!("{}:", "caused by".red().bold());
|
||||||
})
|
for err in cause {
|
||||||
.collect::<Result<Vec<_>, _>>()
|
eprintln!(" - {err}");
|
||||||
.context("failed to install workspace member's dependencies")?
|
}
|
||||||
.into_iter()
|
}
|
||||||
.fold(BTreeMap::new(), |mut map, (name, target, path)| {
|
|
||||||
map.entry(name).or_default().insert(target, path);
|
let backtrace = err.backtrace();
|
||||||
map
|
match backtrace.status() {
|
||||||
}),
|
std::backtrace::BacktraceStatus::Disabled => {
|
||||||
})
|
eprintln!(
|
||||||
|
"\n{}: set RUST_BACKTRACE=1 for a backtrace",
|
||||||
|
"help".yellow().bold()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
std::backtrace::BacktraceStatus::Captured => {
|
||||||
|
eprintln!("\n{}:\n{backtrace}", "backtrace".yellow().bold());
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
eprintln!("\n{}: not captured", "backtrace".yellow().bold());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
114
src/cli/repos.rs
|
@ -1,114 +0,0 @@
|
||||||
use crate::{
|
|
||||||
cli::{config::read_config, home_dir},
|
|
||||||
util::authenticate_conn,
|
|
||||||
};
|
|
||||||
use anyhow::Context;
|
|
||||||
use gix::remote::Direction;
|
|
||||||
use pesde::Project;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
fn update_repo<P: AsRef<Path>>(
|
|
||||||
name: &str,
|
|
||||||
path: P,
|
|
||||||
url: gix::Url,
|
|
||||||
project: &Project,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
let path = path.as_ref();
|
|
||||||
if path.exists() {
|
|
||||||
let repo = gix::open(path).context(format!("failed to open {name} repository"))?;
|
|
||||||
|
|
||||||
let remote = repo
|
|
||||||
.find_default_remote(Direction::Fetch)
|
|
||||||
.context(format!("missing default remote of {name} repository"))?
|
|
||||||
.context(format!(
|
|
||||||
"failed to find default remote of {name} repository"
|
|
||||||
))?;
|
|
||||||
|
|
||||||
let mut connection = remote.connect(Direction::Fetch).context(format!(
|
|
||||||
"failed to connect to default remote of {name} repository"
|
|
||||||
))?;
|
|
||||||
|
|
||||||
authenticate_conn(&mut connection, project.auth_config());
|
|
||||||
|
|
||||||
let results = connection
|
|
||||||
.prepare_fetch(gix::progress::Discard, Default::default())
|
|
||||||
.context(format!("failed to prepare {name} repository fetch"))?
|
|
||||||
.receive(gix::progress::Discard, &false.into())
|
|
||||||
.context(format!("failed to receive new {name} repository contents"))?;
|
|
||||||
|
|
||||||
let remote_ref = results
|
|
||||||
.ref_map
|
|
||||||
.remote_refs
|
|
||||||
.first()
|
|
||||||
.context(format!("failed to get remote refs of {name} repository"))?;
|
|
||||||
|
|
||||||
let unpacked = remote_ref.unpack();
|
|
||||||
let oid = unpacked
|
|
||||||
.1
|
|
||||||
.or(unpacked.2)
|
|
||||||
.context("couldn't find oid in remote ref")?;
|
|
||||||
|
|
||||||
let tree = repo
|
|
||||||
.find_object(oid)
|
|
||||||
.context(format!("failed to find {name} repository tree"))?
|
|
||||||
.peel_to_tree()
|
|
||||||
.context(format!("failed to peel {name} repository object to tree"))?;
|
|
||||||
|
|
||||||
let mut index = gix::index::File::from_state(
|
|
||||||
gix::index::State::from_tree(&tree.id, &repo.objects, Default::default()).context(
|
|
||||||
format!("failed to create index state from {name} repository tree"),
|
|
||||||
)?,
|
|
||||||
repo.index_path(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let opts = gix::worktree::state::checkout::Options {
|
|
||||||
overwrite_existing: true,
|
|
||||||
destination_is_initially_empty: false,
|
|
||||||
..Default::default()
|
|
||||||
};
|
|
||||||
|
|
||||||
gix::worktree::state::checkout(
|
|
||||||
&mut index,
|
|
||||||
repo.work_dir().context(format!("{name} repo is bare"))?,
|
|
||||||
repo.objects
|
|
||||||
.clone()
|
|
||||||
.into_arc()
|
|
||||||
.context("failed to clone objects")?,
|
|
||||||
&gix::progress::Discard,
|
|
||||||
&gix::progress::Discard,
|
|
||||||
&false.into(),
|
|
||||||
opts,
|
|
||||||
)
|
|
||||||
.context(format!("failed to checkout {name} repository"))?;
|
|
||||||
|
|
||||||
index
|
|
||||||
.write(gix::index::write::Options::default())
|
|
||||||
.context("failed to write index")?;
|
|
||||||
} else {
|
|
||||||
std::fs::create_dir_all(path).context(format!("failed to create {name} directory"))?;
|
|
||||||
|
|
||||||
gix::prepare_clone(url, path)
|
|
||||||
.context(format!("failed to prepare {name} repository clone"))?
|
|
||||||
.fetch_then_checkout(gix::progress::Discard, &false.into())
|
|
||||||
.context(format!("failed to fetch and checkout {name} repository"))?
|
|
||||||
.0
|
|
||||||
.main_worktree(gix::progress::Discard, &false.into())
|
|
||||||
.context(format!("failed to set {name} repository as main worktree"))?;
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn update_repo_dependencies(project: &Project) -> anyhow::Result<()> {
|
|
||||||
let home_dir = home_dir()?;
|
|
||||||
let config = read_config()?;
|
|
||||||
|
|
||||||
update_repo(
|
|
||||||
"scripts",
|
|
||||||
home_dir.join("scripts"),
|
|
||||||
config.scripts_repo,
|
|
||||||
project,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
|
@ -1,16 +1,22 @@
|
||||||
use anyhow::Context;
|
|
||||||
use colored::Colorize;
|
|
||||||
use reqwest::header::ACCEPT;
|
|
||||||
use semver::Version;
|
|
||||||
use serde::Deserialize;
|
|
||||||
use std::{fs::create_dir_all, io::Read, path::PathBuf};
|
|
||||||
|
|
||||||
use crate::cli::{
|
use crate::cli::{
|
||||||
bin_dir,
|
bin_dir,
|
||||||
config::{read_config, write_config, CliConfig},
|
config::{read_config, write_config, CliConfig},
|
||||||
files::make_executable,
|
files::make_executable,
|
||||||
home_dir,
|
home_dir,
|
||||||
};
|
};
|
||||||
|
use anyhow::Context;
|
||||||
|
use colored::Colorize;
|
||||||
|
use fs_err::tokio as fs;
|
||||||
|
use futures::StreamExt;
|
||||||
|
use reqwest::header::ACCEPT;
|
||||||
|
use semver::Version;
|
||||||
|
use serde::Deserialize;
|
||||||
|
use std::{
|
||||||
|
env::current_exe,
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
};
|
||||||
|
use tokio::io::AsyncWrite;
|
||||||
|
use tracing::instrument;
|
||||||
|
|
||||||
pub fn current_version() -> Version {
|
pub fn current_version() -> Version {
|
||||||
Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
|
Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
|
||||||
|
@ -28,79 +34,158 @@ struct Asset {
|
||||||
url: url::Url,
|
url: url::Url,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[instrument(level = "trace")]
|
||||||
fn get_repo() -> (String, String) {
|
fn get_repo() -> (String, String) {
|
||||||
let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
|
let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
|
||||||
(
|
let (owner, repo) = (
|
||||||
parts.next().unwrap().to_string(),
|
parts.next().unwrap().to_string(),
|
||||||
parts.next().unwrap().to_string(),
|
parts.next().unwrap().to_string(),
|
||||||
)
|
);
|
||||||
|
|
||||||
|
tracing::trace!("repository for updates: {owner}/{repo}");
|
||||||
|
|
||||||
|
(owner, repo)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum VersionType {
|
||||||
|
Latest,
|
||||||
|
Specific(Version),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(skip(reqwest), level = "trace")]
|
||||||
|
pub async fn get_remote_version(
|
||||||
|
reqwest: &reqwest::Client,
|
||||||
|
ty: VersionType,
|
||||||
|
) -> anyhow::Result<Version> {
|
||||||
|
let (owner, repo) = get_repo();
|
||||||
|
|
||||||
|
let mut releases = reqwest
|
||||||
|
.get(format!(
|
||||||
|
"https://api.github.com/repos/{owner}/{repo}/releases",
|
||||||
|
))
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("failed to send request to GitHub API")?
|
||||||
|
.error_for_status()
|
||||||
|
.context("failed to get GitHub API response")?
|
||||||
|
.json::<Vec<Release>>()
|
||||||
|
.await
|
||||||
|
.context("failed to parse GitHub API response")?
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|release| Version::parse(release.tag_name.trim_start_matches('v')).ok());
|
||||||
|
|
||||||
|
match ty {
|
||||||
|
VersionType::Latest => releases.max(),
|
||||||
|
VersionType::Specific(version) => {
|
||||||
|
releases.find(|v| no_build_metadata(v) == no_build_metadata(&version))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.context("failed to find latest version")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn no_build_metadata(version: &Version) -> Version {
|
||||||
|
let mut version = version.clone();
|
||||||
|
version.build = semver::BuildMetadata::EMPTY;
|
||||||
|
version
|
||||||
}
|
}
|
||||||
|
|
||||||
const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
|
const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
|
||||||
|
|
||||||
pub fn check_for_updates(reqwest: &reqwest::blocking::Client) -> anyhow::Result<()> {
|
#[instrument(skip(reqwest), level = "trace")]
|
||||||
let (owner, repo) = get_repo();
|
pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
|
||||||
|
let config = read_config().await?;
|
||||||
let config = read_config()?;
|
|
||||||
|
|
||||||
let version = if let Some((_, version)) = config
|
let version = if let Some((_, version)) = config
|
||||||
.last_checked_updates
|
.last_checked_updates
|
||||||
.filter(|(time, _)| chrono::Utc::now() - *time < CHECK_INTERVAL)
|
.filter(|(time, _)| chrono::Utc::now() - *time < CHECK_INTERVAL)
|
||||||
{
|
{
|
||||||
|
tracing::debug!("using cached version");
|
||||||
version
|
version
|
||||||
} else {
|
} else {
|
||||||
let releases = reqwest
|
tracing::debug!("checking for updates");
|
||||||
.get(format!(
|
let version = get_remote_version(reqwest, VersionType::Latest).await?;
|
||||||
"https://api.github.com/repos/{owner}/{repo}/releases",
|
|
||||||
))
|
|
||||||
.send()
|
|
||||||
.context("failed to send request to GitHub API")?
|
|
||||||
.error_for_status()
|
|
||||||
.context("failed to get GitHub API response")?
|
|
||||||
.json::<Vec<Release>>()
|
|
||||||
.context("failed to parse GitHub API response")?;
|
|
||||||
|
|
||||||
let version = releases
|
|
||||||
.into_iter()
|
|
||||||
.map(|release| Version::parse(release.tag_name.trim_start_matches('v')).unwrap())
|
|
||||||
.max()
|
|
||||||
.context("failed to find latest version")?;
|
|
||||||
|
|
||||||
write_config(&CliConfig {
|
write_config(&CliConfig {
|
||||||
last_checked_updates: Some((chrono::Utc::now(), version.clone())),
|
last_checked_updates: Some((chrono::Utc::now(), version.clone())),
|
||||||
..config
|
..config
|
||||||
})?;
|
})
|
||||||
|
.await?;
|
||||||
|
|
||||||
version
|
version
|
||||||
};
|
};
|
||||||
|
let current_version = current_version();
|
||||||
|
let version_no_metadata = no_build_metadata(&version);
|
||||||
|
|
||||||
if version > current_version() {
|
if version_no_metadata <= current_version {
|
||||||
let name = env!("CARGO_PKG_NAME");
|
return Ok(());
|
||||||
|
|
||||||
let unformatted_message = format!("a new version of {name} is available: {version}");
|
|
||||||
|
|
||||||
let message = format!(
|
|
||||||
"a new version of {} is available: {}",
|
|
||||||
name.cyan(),
|
|
||||||
version.to_string().yellow().bold()
|
|
||||||
);
|
|
||||||
|
|
||||||
let stars = "-"
|
|
||||||
.repeat(unformatted_message.len() + 4)
|
|
||||||
.bright_magenta()
|
|
||||||
.bold();
|
|
||||||
let column = "|".bright_magenta().bold();
|
|
||||||
|
|
||||||
println!("\n{stars}\n{column} {message} {column}\n{stars}\n",);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let name = env!("CARGO_BIN_NAME");
|
||||||
|
let changelog = format!("{}/releases/tag/v{version}", env!("CARGO_PKG_REPOSITORY"));
|
||||||
|
|
||||||
|
let unformatted_messages = [
|
||||||
|
"".to_string(),
|
||||||
|
format!("update available! {current_version} → {version_no_metadata}"),
|
||||||
|
format!("changelog: {changelog}"),
|
||||||
|
format!("run `{name} self-upgrade` to upgrade"),
|
||||||
|
"".to_string(),
|
||||||
|
];
|
||||||
|
|
||||||
|
let width = unformatted_messages
|
||||||
|
.iter()
|
||||||
|
.map(|s| s.chars().count())
|
||||||
|
.max()
|
||||||
|
.unwrap()
|
||||||
|
+ 4;
|
||||||
|
|
||||||
|
let column = "│".bright_magenta();
|
||||||
|
|
||||||
|
let message = [
|
||||||
|
"".to_string(),
|
||||||
|
format!(
|
||||||
|
"update available! {} → {}",
|
||||||
|
current_version.to_string().red(),
|
||||||
|
version_no_metadata.to_string().green()
|
||||||
|
),
|
||||||
|
format!("changelog: {}", changelog.blue()),
|
||||||
|
format!(
|
||||||
|
"run `{} {}` to upgrade",
|
||||||
|
name.blue(),
|
||||||
|
"self-upgrade".yellow()
|
||||||
|
),
|
||||||
|
"".to_string(),
|
||||||
|
]
|
||||||
|
.into_iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(i, s)| {
|
||||||
|
let text_length = unformatted_messages[i].chars().count();
|
||||||
|
let padding = (width as f32 - text_length as f32) / 2f32;
|
||||||
|
let padding_l = " ".repeat(padding.floor() as usize);
|
||||||
|
let padding_r = " ".repeat(padding.ceil() as usize);
|
||||||
|
format!("{column}{padding_l}{s}{padding_r}{column}")
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join("\n");
|
||||||
|
|
||||||
|
let lines = "─".repeat(width).bright_magenta();
|
||||||
|
|
||||||
|
let tl = "╭".bright_magenta();
|
||||||
|
let tr = "╮".bright_magenta();
|
||||||
|
let bl = "╰".bright_magenta();
|
||||||
|
let br = "╯".bright_magenta();
|
||||||
|
|
||||||
|
println!("\n{tl}{lines}{tr}\n{message}\n{bl}{lines}{br}\n");
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn download_github_release(
|
#[instrument(skip(reqwest, writer), level = "trace")]
|
||||||
reqwest: &reqwest::blocking::Client,
|
pub async fn download_github_release<W: AsyncWrite + Unpin>(
|
||||||
|
reqwest: &reqwest::Client,
|
||||||
version: &Version,
|
version: &Version,
|
||||||
) -> anyhow::Result<Vec<u8>> {
|
mut writer: W,
|
||||||
|
) -> anyhow::Result<()> {
|
||||||
let (owner, repo) = get_repo();
|
let (owner, repo) = get_repo();
|
||||||
|
|
||||||
let release = reqwest
|
let release = reqwest
|
||||||
|
@ -108,10 +193,12 @@ pub fn download_github_release(
|
||||||
"https://api.github.com/repos/{owner}/{repo}/releases/tags/v{version}",
|
"https://api.github.com/repos/{owner}/{repo}/releases/tags/v{version}",
|
||||||
))
|
))
|
||||||
.send()
|
.send()
|
||||||
|
.await
|
||||||
.context("failed to send request to GitHub API")?
|
.context("failed to send request to GitHub API")?
|
||||||
.error_for_status()
|
.error_for_status()
|
||||||
.context("failed to get GitHub API response")?
|
.context("failed to get GitHub API response")?
|
||||||
.json::<Release>()
|
.json::<Release>()
|
||||||
|
.await
|
||||||
.context("failed to parse GitHub API response")?;
|
.context("failed to parse GitHub API response")?;
|
||||||
|
|
||||||
let asset = release
|
let asset = release
|
||||||
|
@ -130,40 +217,65 @@ pub fn download_github_release(
|
||||||
.get(asset.url)
|
.get(asset.url)
|
||||||
.header(ACCEPT, "application/octet-stream")
|
.header(ACCEPT, "application/octet-stream")
|
||||||
.send()
|
.send()
|
||||||
|
.await
|
||||||
.context("failed to send request to download asset")?
|
.context("failed to send request to download asset")?
|
||||||
.error_for_status()
|
.error_for_status()
|
||||||
.context("failed to download asset")?
|
.context("failed to download asset")?
|
||||||
.bytes()
|
.bytes()
|
||||||
|
.await
|
||||||
.context("failed to download asset")?;
|
.context("failed to download asset")?;
|
||||||
|
|
||||||
let mut decoder = flate2::read::GzDecoder::new(bytes.as_ref());
|
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes.as_ref());
|
||||||
let mut archive = tar::Archive::new(&mut decoder);
|
let mut archive = tokio_tar::Archive::new(&mut decoder);
|
||||||
|
|
||||||
let entry = archive
|
let mut entry = archive
|
||||||
.entries()
|
.entries()
|
||||||
.context("failed to read archive entries")?
|
.context("failed to read archive entries")?
|
||||||
.next()
|
.next()
|
||||||
|
.await
|
||||||
.context("archive has no entry")?
|
.context("archive has no entry")?
|
||||||
.context("failed to get first archive entry")?;
|
.context("failed to get first archive entry")?;
|
||||||
|
|
||||||
entry
|
tokio::io::copy(&mut entry, &mut writer)
|
||||||
.bytes()
|
.await
|
||||||
.collect::<Result<Vec<u8>, std::io::Error>>()
|
.context("failed to write archive entry to file")
|
||||||
.context("failed to read archive entry bytes")
|
.map(|_| ())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_or_download_version(
|
#[derive(Debug)]
|
||||||
reqwest: &reqwest::blocking::Client,
|
pub enum TagInfo {
|
||||||
version: &Version,
|
Complete(Version),
|
||||||
|
Incomplete(Version),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(skip(reqwest), level = "trace")]
|
||||||
|
pub async fn get_or_download_version(
|
||||||
|
reqwest: &reqwest::Client,
|
||||||
|
tag: &TagInfo,
|
||||||
|
always_give_path: bool,
|
||||||
) -> anyhow::Result<Option<PathBuf>> {
|
) -> anyhow::Result<Option<PathBuf>> {
|
||||||
let path = home_dir()?.join("versions");
|
let path = home_dir()?.join("versions");
|
||||||
create_dir_all(&path).context("failed to create versions directory")?;
|
fs::create_dir_all(&path)
|
||||||
|
.await
|
||||||
|
.context("failed to create versions directory")?;
|
||||||
|
|
||||||
let path = path.join(format!("{version}{}", std::env::consts::EXE_SUFFIX));
|
let version = match tag {
|
||||||
|
TagInfo::Complete(version) => version,
|
||||||
|
// don't fetch the version since it could be cached
|
||||||
|
TagInfo::Incomplete(version) => version,
|
||||||
|
};
|
||||||
|
|
||||||
let is_requested_version = *version == current_version();
|
let path = path.join(format!(
|
||||||
|
"{}{}",
|
||||||
|
no_build_metadata(version),
|
||||||
|
std::env::consts::EXE_SUFFIX
|
||||||
|
));
|
||||||
|
|
||||||
|
let is_requested_version = !always_give_path && *version == current_version();
|
||||||
|
|
||||||
if path.exists() {
|
if path.exists() {
|
||||||
|
tracing::debug!("version already exists");
|
||||||
|
|
||||||
return Ok(if is_requested_version {
|
return Ok(if is_requested_version {
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
|
@ -172,14 +284,34 @@ pub fn get_or_download_version(
|
||||||
}
|
}
|
||||||
|
|
||||||
if is_requested_version {
|
if is_requested_version {
|
||||||
std::fs::copy(std::env::current_exe()?, &path)
|
tracing::debug!("copying current executable to version directory");
|
||||||
|
fs::copy(current_exe()?, &path)
|
||||||
|
.await
|
||||||
.context("failed to copy current executable to version directory")?;
|
.context("failed to copy current executable to version directory")?;
|
||||||
} else {
|
} else {
|
||||||
let bytes = download_github_release(reqwest, version)?;
|
let version = match tag {
|
||||||
std::fs::write(&path, bytes).context("failed to write downloaded version file")?;
|
TagInfo::Complete(version) => version.clone(),
|
||||||
|
TagInfo::Incomplete(version) => {
|
||||||
|
get_remote_version(reqwest, VersionType::Specific(version.clone()))
|
||||||
|
.await
|
||||||
|
.context("failed to get remote version")?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
tracing::debug!("downloading version");
|
||||||
|
download_github_release(
|
||||||
|
reqwest,
|
||||||
|
&version,
|
||||||
|
fs::File::create(&path)
|
||||||
|
.await
|
||||||
|
.context("failed to create version file")?,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
}
|
}
|
||||||
|
|
||||||
make_executable(&path).context("failed to make downloaded version executable")?;
|
make_executable(&path)
|
||||||
|
.await
|
||||||
|
.context("failed to make downloaded version executable")?;
|
||||||
|
|
||||||
Ok(if is_requested_version {
|
Ok(if is_requested_version {
|
||||||
None
|
None
|
||||||
|
@ -188,48 +320,42 @@ pub fn get_or_download_version(
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn max_installed_version() -> anyhow::Result<Version> {
|
#[instrument(level = "trace")]
|
||||||
let versions_dir = home_dir()?.join("versions");
|
pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
|
||||||
create_dir_all(&versions_dir).context("failed to create versions directory")?;
|
let bin_exe_path = bin_dir().await?.join(format!(
|
||||||
|
|
||||||
let max_version = std::fs::read_dir(versions_dir)
|
|
||||||
.context("failed to read versions directory")?
|
|
||||||
.collect::<Result<Vec<_>, _>>()?
|
|
||||||
.into_iter()
|
|
||||||
.map(|entry| {
|
|
||||||
#[cfg(not(windows))]
|
|
||||||
let name = entry
|
|
||||||
.path()
|
|
||||||
.file_name()
|
|
||||||
.unwrap()
|
|
||||||
.to_string_lossy()
|
|
||||||
.to_string();
|
|
||||||
#[cfg(windows)]
|
|
||||||
let name = entry
|
|
||||||
.path()
|
|
||||||
.file_stem()
|
|
||||||
.unwrap()
|
|
||||||
.to_string_lossy()
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
Version::parse(&name).unwrap()
|
|
||||||
})
|
|
||||||
.max()
|
|
||||||
.filter(|v| v >= ¤t_version())
|
|
||||||
.unwrap_or_else(current_version);
|
|
||||||
|
|
||||||
Ok(max_version)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn update_bin_exe() -> anyhow::Result<()> {
|
|
||||||
let copy_to = bin_dir()?.join(format!(
|
|
||||||
"{}{}",
|
"{}{}",
|
||||||
env!("CARGO_BIN_NAME"),
|
env!("CARGO_BIN_NAME"),
|
||||||
std::env::consts::EXE_SUFFIX
|
std::env::consts::EXE_SUFFIX
|
||||||
));
|
));
|
||||||
|
let mut downloaded_file = downloaded_file.to_path_buf();
|
||||||
|
|
||||||
std::fs::copy(std::env::current_exe()?, ©_to)
|
let exists = bin_exe_path.exists();
|
||||||
|
|
||||||
|
if cfg!(target_os = "linux") && exists {
|
||||||
|
fs::remove_file(&bin_exe_path)
|
||||||
|
.await
|
||||||
|
.context("failed to remove existing executable")?;
|
||||||
|
} else if exists {
|
||||||
|
let tempfile = tempfile::Builder::new()
|
||||||
|
.make(|_| Ok(()))
|
||||||
|
.context("failed to create temporary file")?;
|
||||||
|
let path = tempfile.into_temp_path().to_path_buf();
|
||||||
|
#[cfg(windows)]
|
||||||
|
let path = path.with_extension("exe");
|
||||||
|
|
||||||
|
let current_exe = current_exe().context("failed to get current exe path")?;
|
||||||
|
if current_exe == downloaded_file {
|
||||||
|
downloaded_file = path.to_path_buf();
|
||||||
|
}
|
||||||
|
|
||||||
|
fs::rename(&bin_exe_path, &path)
|
||||||
|
.await
|
||||||
|
.context("failed to rename current executable")?;
|
||||||
|
}
|
||||||
|
|
||||||
|
fs::copy(downloaded_file, &bin_exe_path)
|
||||||
|
.await
|
||||||
.context("failed to copy executable to bin folder")?;
|
.context("failed to copy executable to bin folder")?;
|
||||||
|
|
||||||
make_executable(©_to)
|
make_executable(&bin_exe_path).await
|
||||||
}
|
}
|
||||||
|
|
163
src/download.rs
|
@ -1,114 +1,157 @@
|
||||||
use crate::{
|
use crate::{
|
||||||
lockfile::{DependencyGraph, DownloadedDependencyGraphNode, DownloadedGraph},
|
lockfile::{DependencyGraph, DownloadedDependencyGraphNode, DownloadedGraph},
|
||||||
manifest::DependencyType,
|
manifest::DependencyType,
|
||||||
|
refresh_sources,
|
||||||
source::{
|
source::{
|
||||||
traits::{PackageRef, PackageSource},
|
traits::{PackageRef, PackageSource},
|
||||||
PackageSources,
|
PackageSources,
|
||||||
},
|
},
|
||||||
Project, PACKAGES_CONTAINER_NAME,
|
Project, PACKAGES_CONTAINER_NAME,
|
||||||
};
|
};
|
||||||
|
use fs_err::tokio as fs;
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashSet,
|
collections::HashSet,
|
||||||
fs::create_dir_all,
|
sync::{Arc, Mutex},
|
||||||
sync::{mpsc::Receiver, Arc, Mutex},
|
|
||||||
};
|
};
|
||||||
|
use tracing::{instrument, Instrument};
|
||||||
|
|
||||||
type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>;
|
type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>;
|
||||||
|
|
||||||
type MultithreadDownloadJob = (
|
pub(crate) type MultithreadDownloadJob = (
|
||||||
Receiver<Result<(), errors::DownloadGraphError>>,
|
tokio::sync::mpsc::Receiver<Result<String, errors::DownloadGraphError>>,
|
||||||
MultithreadedGraph,
|
MultithreadedGraph,
|
||||||
);
|
);
|
||||||
|
|
||||||
impl Project {
|
impl Project {
|
||||||
/// Downloads a graph of dependencies
|
/// Downloads a graph of dependencies
|
||||||
pub fn download_graph(
|
#[instrument(skip(self, graph, refreshed_sources, reqwest), level = "debug")]
|
||||||
|
pub async fn download_graph(
|
||||||
&self,
|
&self,
|
||||||
graph: &DependencyGraph,
|
graph: &DependencyGraph,
|
||||||
refreshed_sources: &mut HashSet<PackageSources>,
|
refreshed_sources: &mut HashSet<PackageSources>,
|
||||||
reqwest: &reqwest::blocking::Client,
|
reqwest: &reqwest::Client,
|
||||||
threads: usize,
|
|
||||||
prod: bool,
|
prod: bool,
|
||||||
write: bool,
|
write: bool,
|
||||||
|
wally: bool,
|
||||||
) -> Result<MultithreadDownloadJob, errors::DownloadGraphError> {
|
) -> Result<MultithreadDownloadJob, errors::DownloadGraphError> {
|
||||||
let manifest = self.deser_manifest()?;
|
let manifest = self.deser_manifest().await?;
|
||||||
|
let manifest_target_kind = manifest.target.kind();
|
||||||
let downloaded_graph: MultithreadedGraph = Arc::new(Mutex::new(Default::default()));
|
let downloaded_graph: MultithreadedGraph = Arc::new(Mutex::new(Default::default()));
|
||||||
|
|
||||||
let threadpool = threadpool::ThreadPool::new(threads);
|
let (tx, rx) = tokio::sync::mpsc::channel(
|
||||||
let (tx, rx) = std::sync::mpsc::channel();
|
graph
|
||||||
|
.iter()
|
||||||
|
.map(|(_, versions)| versions.len())
|
||||||
|
.sum::<usize>()
|
||||||
|
.max(1),
|
||||||
|
);
|
||||||
|
|
||||||
|
refresh_sources(
|
||||||
|
self,
|
||||||
|
graph
|
||||||
|
.iter()
|
||||||
|
.flat_map(|(_, versions)| versions.iter())
|
||||||
|
.map(|(_, node)| node.pkg_ref.source()),
|
||||||
|
refreshed_sources,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let project = Arc::new(self.clone());
|
||||||
|
|
||||||
for (name, versions) in graph {
|
for (name, versions) in graph {
|
||||||
for (version_id, node) in versions {
|
for (version_id, node) in versions {
|
||||||
let source = node.pkg_ref.source();
|
// we need to download pesde packages first, since scripts (for target finding for example) can depend on them
|
||||||
|
if node.pkg_ref.like_wally() != wally {
|
||||||
if refreshed_sources.insert(source.clone()) {
|
continue;
|
||||||
source.refresh(self).map_err(Box::new)?;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let container_folder = node.container_folder(
|
|
||||||
&self
|
|
||||||
.package_dir()
|
|
||||||
.join(
|
|
||||||
manifest
|
|
||||||
.target
|
|
||||||
.kind()
|
|
||||||
.packages_folder(&node.pkg_ref.target_kind()),
|
|
||||||
)
|
|
||||||
.join(PACKAGES_CONTAINER_NAME),
|
|
||||||
name,
|
|
||||||
version_id.version(),
|
|
||||||
);
|
|
||||||
|
|
||||||
create_dir_all(&container_folder)?;
|
|
||||||
|
|
||||||
let tx = tx.clone();
|
let tx = tx.clone();
|
||||||
|
|
||||||
let name = name.clone();
|
let name = name.clone();
|
||||||
let version_id = version_id.clone();
|
let version_id = version_id.clone();
|
||||||
let node = node.clone();
|
let node = node.clone();
|
||||||
|
|
||||||
let project = Arc::new(self.clone());
|
let span = tracing::info_span!(
|
||||||
|
"download",
|
||||||
|
name = name.to_string(),
|
||||||
|
version_id = version_id.to_string()
|
||||||
|
);
|
||||||
|
|
||||||
|
let project = project.clone();
|
||||||
let reqwest = reqwest.clone();
|
let reqwest = reqwest.clone();
|
||||||
let downloaded_graph = downloaded_graph.clone();
|
let downloaded_graph = downloaded_graph.clone();
|
||||||
|
|
||||||
threadpool.execute(move || {
|
let package_dir = self.package_dir().to_path_buf();
|
||||||
let project = project.clone();
|
|
||||||
|
|
||||||
log::debug!("downloading {name}@{version_id}");
|
tokio::spawn(
|
||||||
|
async move {
|
||||||
|
let source = node.pkg_ref.source();
|
||||||
|
|
||||||
let (fs, target) = match source.download(&node.pkg_ref, &project, &reqwest) {
|
let container_folder = node.container_folder(
|
||||||
Ok(target) => target,
|
&package_dir
|
||||||
Err(e) => {
|
.join(manifest_target_kind.packages_folder(version_id.target()))
|
||||||
tx.send(Err(Box::new(e).into())).unwrap();
|
.join(PACKAGES_CONTAINER_NAME),
|
||||||
return;
|
&name,
|
||||||
|
version_id.version(),
|
||||||
|
);
|
||||||
|
|
||||||
|
match fs::create_dir_all(&container_folder).await {
|
||||||
|
Ok(_) => {}
|
||||||
|
Err(e) => {
|
||||||
|
tx.send(Err(errors::DownloadGraphError::Io(e)))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
return;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
};
|
|
||||||
|
|
||||||
log::debug!("downloaded {name}@{version_id}");
|
let project = project.clone();
|
||||||
|
|
||||||
if write {
|
tracing::debug!("downloading");
|
||||||
if !prod || node.ty != DependencyType::Dev {
|
|
||||||
match fs.write_to(container_folder, project.cas_dir(), true) {
|
let (fs, target) =
|
||||||
Ok(_) => {}
|
match source.download(&node.pkg_ref, &project, &reqwest).await {
|
||||||
|
Ok(target) => target,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
tx.send(Err(errors::DownloadGraphError::WriteFailed(e)))
|
tx.send(Err(Box::new(e).into())).await.unwrap();
|
||||||
.unwrap();
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
} else {
|
|
||||||
log::debug!("skipping writing {name}@{version_id} to disk, dev dependency in prod mode");
|
tracing::debug!("downloaded");
|
||||||
|
|
||||||
|
if write {
|
||||||
|
if !prod || node.resolved_ty != DependencyType::Dev {
|
||||||
|
match fs.write_to(container_folder, project.cas_dir(), true).await {
|
||||||
|
Ok(_) => {}
|
||||||
|
Err(e) => {
|
||||||
|
tx.send(Err(errors::DownloadGraphError::WriteFailed(e)))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
tracing::debug!(
|
||||||
|
"skipping write to disk, dev dependency in prod mode"
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let display_name = format!("{name}@{version_id}");
|
||||||
|
|
||||||
|
{
|
||||||
|
let mut downloaded_graph = downloaded_graph.lock().unwrap();
|
||||||
|
downloaded_graph
|
||||||
|
.entry(name)
|
||||||
|
.or_default()
|
||||||
|
.insert(version_id, DownloadedDependencyGraphNode { node, target });
|
||||||
|
}
|
||||||
|
|
||||||
|
tx.send(Ok(display_name)).await.unwrap();
|
||||||
}
|
}
|
||||||
|
.instrument(span),
|
||||||
let mut downloaded_graph = downloaded_graph.lock().unwrap();
|
);
|
||||||
downloaded_graph
|
|
||||||
.entry(name)
|
|
||||||
.or_default()
|
|
||||||
.insert(version_id, DownloadedDependencyGraphNode { node, target });
|
|
||||||
|
|
||||||
tx.send(Ok(())).unwrap();
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -133,7 +176,7 @@ pub mod errors {
|
||||||
RefreshFailed(#[from] Box<crate::source::errors::RefreshError>),
|
RefreshFailed(#[from] Box<crate::source::errors::RefreshError>),
|
||||||
|
|
||||||
/// Error interacting with the filesystem
|
/// Error interacting with the filesystem
|
||||||
#[error("error interacting with filesystem")]
|
#[error("error interacting with the filesystem")]
|
||||||
Io(#[from] std::io::Error),
|
Io(#[from] std::io::Error),
|
||||||
|
|
||||||
/// Error downloading a package
|
/// Error downloading a package
|
||||||
|
@ -142,6 +185,6 @@ pub mod errors {
|
||||||
|
|
||||||
/// Error writing package contents
|
/// Error writing package contents
|
||||||
#[error("failed to write package contents")]
|
#[error("failed to write package contents")]
|
||||||
WriteFailed(std::io::Error),
|
WriteFailed(#[source] std::io::Error),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
176
src/download_and_link.rs
Normal file
|
@ -0,0 +1,176 @@
|
||||||
|
use crate::{
|
||||||
|
lockfile::{DependencyGraph, DownloadedGraph},
|
||||||
|
manifest::DependencyType,
|
||||||
|
source::PackageSources,
|
||||||
|
Project,
|
||||||
|
};
|
||||||
|
use futures::FutureExt;
|
||||||
|
use std::{
|
||||||
|
collections::HashSet,
|
||||||
|
future::Future,
|
||||||
|
sync::{Arc, Mutex as StdMutex},
|
||||||
|
};
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
use tracing::{instrument, Instrument};
|
||||||
|
|
||||||
|
/// Filters a graph to only include production dependencies, if `prod` is `true`
|
||||||
|
pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph {
|
||||||
|
if !prod {
|
||||||
|
return graph.clone();
|
||||||
|
}
|
||||||
|
|
||||||
|
graph
|
||||||
|
.iter()
|
||||||
|
.map(|(name, versions)| {
|
||||||
|
(
|
||||||
|
name.clone(),
|
||||||
|
versions
|
||||||
|
.iter()
|
||||||
|
.filter(|(_, node)| node.node.resolved_ty != DependencyType::Dev)
|
||||||
|
.map(|(v_id, node)| (v_id.clone(), node.clone()))
|
||||||
|
.collect(),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Receiver for dependencies downloaded and linked
|
||||||
|
pub type DownloadAndLinkReceiver =
|
||||||
|
tokio::sync::mpsc::Receiver<Result<String, crate::download::errors::DownloadGraphError>>;
|
||||||
|
|
||||||
|
impl Project {
|
||||||
|
/// Downloads a graph of dependencies and links them in the correct order
|
||||||
|
#[instrument(
|
||||||
|
skip(self, graph, refreshed_sources, reqwest, pesde_cb),
|
||||||
|
level = "debug"
|
||||||
|
)]
|
||||||
|
pub async fn download_and_link<
|
||||||
|
F: FnOnce(&Arc<DownloadedGraph>) -> R + Send + 'static,
|
||||||
|
R: Future<Output = Result<(), E>> + Send,
|
||||||
|
E: Send + Sync + 'static,
|
||||||
|
>(
|
||||||
|
&self,
|
||||||
|
graph: &Arc<DependencyGraph>,
|
||||||
|
refreshed_sources: &Arc<Mutex<HashSet<PackageSources>>>,
|
||||||
|
reqwest: &reqwest::Client,
|
||||||
|
prod: bool,
|
||||||
|
write: bool,
|
||||||
|
pesde_cb: F,
|
||||||
|
) -> Result<
|
||||||
|
(
|
||||||
|
DownloadAndLinkReceiver,
|
||||||
|
impl Future<Output = Result<DownloadedGraph, errors::DownloadAndLinkError<E>>>,
|
||||||
|
),
|
||||||
|
errors::DownloadAndLinkError<E>,
|
||||||
|
> {
|
||||||
|
let (tx, rx) = tokio::sync::mpsc::channel(
|
||||||
|
graph
|
||||||
|
.iter()
|
||||||
|
.map(|(_, versions)| versions.len())
|
||||||
|
.sum::<usize>()
|
||||||
|
.max(1),
|
||||||
|
);
|
||||||
|
let downloaded_graph = Arc::new(StdMutex::new(DownloadedGraph::default()));
|
||||||
|
|
||||||
|
let this = self.clone();
|
||||||
|
let graph = graph.clone();
|
||||||
|
let reqwest = reqwest.clone();
|
||||||
|
let refreshed_sources = refreshed_sources.clone();
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
rx,
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let mut refreshed_sources = refreshed_sources.lock().await;
|
||||||
|
|
||||||
|
// step 1. download pesde dependencies
|
||||||
|
let (mut pesde_rx, pesde_graph) = this
|
||||||
|
.download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, false)
|
||||||
|
.instrument(tracing::debug_span!("download (pesde)"))
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
while let Some(result) = pesde_rx.recv().await {
|
||||||
|
tx.send(result).await.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
let pesde_graph = Arc::into_inner(pesde_graph).unwrap().into_inner().unwrap();
|
||||||
|
|
||||||
|
// step 2. link pesde dependencies. do so without types
|
||||||
|
if write {
|
||||||
|
this.link_dependencies(&filter_graph(&pesde_graph, prod), false)
|
||||||
|
.instrument(tracing::debug_span!("link (pesde)"))
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let pesde_graph = Arc::new(pesde_graph);
|
||||||
|
|
||||||
|
pesde_cb(&pesde_graph)
|
||||||
|
.await
|
||||||
|
.map_err(errors::DownloadAndLinkError::PesdeCallback)?;
|
||||||
|
|
||||||
|
let pesde_graph = Arc::into_inner(pesde_graph).unwrap();
|
||||||
|
|
||||||
|
// step 3. download wally dependencies
|
||||||
|
let (mut wally_rx, wally_graph) = this
|
||||||
|
.download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, true)
|
||||||
|
.instrument(tracing::debug_span!("download (wally)"))
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
while let Some(result) = wally_rx.recv().await {
|
||||||
|
tx.send(result).await.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
let wally_graph = Arc::into_inner(wally_graph).unwrap().into_inner().unwrap();
|
||||||
|
|
||||||
|
{
|
||||||
|
let mut downloaded_graph = downloaded_graph.lock().unwrap();
|
||||||
|
downloaded_graph.extend(pesde_graph);
|
||||||
|
for (name, versions) in wally_graph {
|
||||||
|
for (version_id, node) in versions {
|
||||||
|
downloaded_graph
|
||||||
|
.entry(name.clone())
|
||||||
|
.or_default()
|
||||||
|
.insert(version_id, node);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let graph = Arc::into_inner(downloaded_graph)
|
||||||
|
.unwrap()
|
||||||
|
.into_inner()
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// step 4. link ALL dependencies. do so with types
|
||||||
|
if write {
|
||||||
|
this.link_dependencies(&filter_graph(&graph, prod), true)
|
||||||
|
.instrument(tracing::debug_span!("link (all)"))
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(graph)
|
||||||
|
})
|
||||||
|
.map(|r| r.unwrap()),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Errors that can occur when downloading and linking dependencies
|
||||||
|
pub mod errors {
|
||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
/// An error that can occur when downloading and linking dependencies
|
||||||
|
#[derive(Debug, Error)]
|
||||||
|
#[non_exhaustive]
|
||||||
|
pub enum DownloadAndLinkError<E> {
|
||||||
|
/// An error occurred while downloading the graph
|
||||||
|
#[error("error downloading graph")]
|
||||||
|
DownloadGraph(#[from] crate::download::errors::DownloadGraphError),
|
||||||
|
|
||||||
|
/// An error occurred while linking dependencies
|
||||||
|
#[error("error linking dependencies")]
|
||||||
|
Linking(#[from] crate::linking::errors::LinkingError),
|
||||||
|
|
||||||
|
/// An error occurred while executing the pesde callback
|
||||||
|
#[error("error executing pesde callback")]
|
||||||
|
PesdeCallback(#[source] E),
|
||||||
|
}
|
||||||
|
}
|
309
src/lib.rs
|
@ -3,15 +3,27 @@
|
||||||
//! pesde has its own registry, however it can also use Wally, and Git repositories as package sources.
|
//! pesde has its own registry, however it can also use Wally, and Git repositories as package sources.
|
||||||
//! It has been designed with multiple targets in mind, namely Roblox, Lune, and Luau.
|
//! It has been designed with multiple targets in mind, namely Roblox, Lune, and Luau.
|
||||||
|
|
||||||
use crate::{lockfile::Lockfile, manifest::Manifest};
|
use crate::{
|
||||||
|
lockfile::Lockfile,
|
||||||
|
manifest::Manifest,
|
||||||
|
source::{traits::PackageSource, PackageSources},
|
||||||
|
};
|
||||||
|
use async_stream::stream;
|
||||||
|
use fs_err::tokio as fs;
|
||||||
|
use futures::{future::try_join_all, Stream};
|
||||||
use gix::sec::identity::Account;
|
use gix::sec::identity::Account;
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashMap,
|
collections::{HashMap, HashSet},
|
||||||
|
fmt::Debug,
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
};
|
};
|
||||||
|
use tracing::instrument;
|
||||||
|
use wax::Pattern;
|
||||||
|
|
||||||
/// Downloading packages
|
/// Downloading packages
|
||||||
pub mod download;
|
pub mod download;
|
||||||
|
/// Utility for downloading and linking in the correct order
|
||||||
|
pub mod download_and_link;
|
||||||
/// Linking packages
|
/// Linking packages
|
||||||
pub mod linking;
|
pub mod linking;
|
||||||
/// Lockfile
|
/// Lockfile
|
||||||
|
@ -40,12 +52,13 @@ pub const DEFAULT_INDEX_NAME: &str = "default";
|
||||||
/// The name of the packages container
|
/// The name of the packages container
|
||||||
pub const PACKAGES_CONTAINER_NAME: &str = ".pesde";
|
pub const PACKAGES_CONTAINER_NAME: &str = ".pesde";
|
||||||
pub(crate) const LINK_LIB_NO_FILE_FOUND: &str = "____pesde_no_export_file_found";
|
pub(crate) const LINK_LIB_NO_FILE_FOUND: &str = "____pesde_no_export_file_found";
|
||||||
|
/// The folder in which scripts are linked
|
||||||
|
pub const SCRIPTS_LINK_FOLDER: &str = ".pesde";
|
||||||
|
|
||||||
/// Struct containing the authentication configuration
|
/// Struct containing the authentication configuration
|
||||||
#[derive(Debug, Default, Clone)]
|
#[derive(Debug, Default, Clone)]
|
||||||
pub struct AuthConfig {
|
pub struct AuthConfig {
|
||||||
default_token: Option<String>,
|
tokens: HashMap<gix::Url, String>,
|
||||||
token_overrides: HashMap<gix::Url, String>,
|
|
||||||
git_credentials: Option<Account>,
|
git_credentials: Option<Account>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -55,18 +68,12 @@ impl AuthConfig {
|
||||||
AuthConfig::default()
|
AuthConfig::default()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Sets the default token
|
/// Set the tokens
|
||||||
pub fn with_default_token<S: AsRef<str>>(mut self, token: Option<S>) -> Self {
|
pub fn with_tokens<I: IntoIterator<Item = (gix::Url, S)>, S: AsRef<str>>(
|
||||||
self.default_token = token.map(|s| s.as_ref().to_string());
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Set the token overrides
|
|
||||||
pub fn with_token_overrides<I: IntoIterator<Item = (gix::Url, S)>, S: AsRef<str>>(
|
|
||||||
mut self,
|
mut self,
|
||||||
tokens: I,
|
tokens: I,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
self.token_overrides = tokens
|
self.tokens = tokens
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(url, s)| (url, s.as_ref().to_string()))
|
.map(|(url, s)| (url, s.as_ref().to_string()))
|
||||||
.collect();
|
.collect();
|
||||||
|
@ -79,27 +86,15 @@ impl AuthConfig {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the default token
|
/// Get the tokens
|
||||||
pub fn default_token(&self) -> Option<&str> {
|
pub fn tokens(&self) -> &HashMap<gix::Url, String> {
|
||||||
self.default_token.as_deref()
|
&self.tokens
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the token overrides
|
|
||||||
pub fn token_overrides(&self) -> &HashMap<gix::Url, String> {
|
|
||||||
&self.token_overrides
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the git credentials
|
/// Get the git credentials
|
||||||
pub fn git_credentials(&self) -> Option<&Account> {
|
pub fn git_credentials(&self) -> Option<&Account> {
|
||||||
self.git_credentials.as_ref()
|
self.git_credentials.as_ref()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn get_token(&self, url: &gix::Url) -> Option<&str> {
|
|
||||||
self.token_overrides
|
|
||||||
.get(url)
|
|
||||||
.map(|s| s.as_str())
|
|
||||||
.or(self.default_token.as_deref())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The main struct of the pesde library, representing a project
|
/// The main struct of the pesde library, representing a project
|
||||||
|
@ -156,71 +151,244 @@ impl Project {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read the manifest file
|
/// Read the manifest file
|
||||||
pub fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
|
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
|
||||||
let string = std::fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
|
pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
|
||||||
|
let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
|
||||||
Ok(string)
|
Ok(string)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO: cache the manifest
|
||||||
/// Deserialize the manifest file
|
/// Deserialize the manifest file
|
||||||
pub fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
|
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
|
||||||
let string = std::fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
|
pub async fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
|
||||||
|
let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
|
||||||
Ok(toml::from_str(&string)?)
|
Ok(toml::from_str(&string)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Write the manifest file
|
/// Write the manifest file
|
||||||
pub fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
|
#[instrument(skip(self, manifest), level = "debug")]
|
||||||
std::fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref())
|
pub async fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
|
||||||
|
fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref()).await
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Deserialize the lockfile
|
/// Deserialize the lockfile
|
||||||
pub fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
|
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
|
||||||
let string = std::fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME))?;
|
pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
|
||||||
|
let string = fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME)).await?;
|
||||||
Ok(toml::from_str(&string)?)
|
Ok(toml::from_str(&string)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Write the lockfile
|
/// Write the lockfile
|
||||||
pub fn write_lockfile(&self, lockfile: Lockfile) -> Result<(), errors::LockfileWriteError> {
|
#[instrument(skip(self, lockfile), level = "debug")]
|
||||||
|
pub async fn write_lockfile(
|
||||||
|
&self,
|
||||||
|
lockfile: Lockfile,
|
||||||
|
) -> Result<(), errors::LockfileWriteError> {
|
||||||
let string = toml::to_string(&lockfile)?;
|
let string = toml::to_string(&lockfile)?;
|
||||||
std::fs::write(self.package_dir.join(LOCKFILE_FILE_NAME), string)?;
|
fs::write(self.package_dir.join(LOCKFILE_FILE_NAME), string).await?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the workspace members
|
/// Get the workspace members
|
||||||
pub fn workspace_members<P: AsRef<Path>>(
|
#[instrument(skip(self), level = "debug")]
|
||||||
|
pub async fn workspace_members<P: AsRef<Path> + Debug>(
|
||||||
&self,
|
&self,
|
||||||
dir: P,
|
dir: P,
|
||||||
) -> Result<HashMap<PathBuf, Manifest>, errors::WorkspaceMembersError> {
|
can_ref_self: bool,
|
||||||
|
) -> Result<
|
||||||
|
impl Stream<Item = Result<(PathBuf, Manifest), errors::WorkspaceMembersError>>,
|
||||||
|
errors::WorkspaceMembersError,
|
||||||
|
> {
|
||||||
let dir = dir.as_ref().to_path_buf();
|
let dir = dir.as_ref().to_path_buf();
|
||||||
let manifest = std::fs::read_to_string(dir.join(MANIFEST_FILE_NAME))
|
let manifest = fs::read_to_string(dir.join(MANIFEST_FILE_NAME))
|
||||||
.map_err(|e| errors::WorkspaceMembersError::ManifestMissing(dir.to_path_buf(), e))?;
|
.await
|
||||||
|
.map_err(errors::WorkspaceMembersError::ManifestMissing)?;
|
||||||
let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
|
let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
|
||||||
errors::WorkspaceMembersError::ManifestDeser(dir.to_path_buf(), Box::new(e))
|
errors::WorkspaceMembersError::ManifestDeser(dir.to_path_buf(), Box::new(e))
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
let members = manifest
|
let members = matching_globs(
|
||||||
.workspace_members
|
dir,
|
||||||
.into_iter()
|
manifest.workspace_members.iter().map(|s| s.as_str()),
|
||||||
.map(|glob| dir.join(glob))
|
false,
|
||||||
.map(|path| glob::glob(&path.as_os_str().to_string_lossy()))
|
can_ref_self,
|
||||||
.collect::<Result<Vec<_>, _>>()?
|
)
|
||||||
.into_iter()
|
.await?;
|
||||||
.flat_map(|paths| paths.into_iter())
|
|
||||||
.collect::<Result<Vec<_>, _>>()?;
|
|
||||||
|
|
||||||
members
|
Ok(stream! {
|
||||||
.into_iter()
|
for path in members {
|
||||||
.map(|path| {
|
let manifest = fs::read_to_string(path.join(MANIFEST_FILE_NAME))
|
||||||
let manifest = std::fs::read_to_string(path.join(MANIFEST_FILE_NAME))
|
.await
|
||||||
.map_err(|e| errors::WorkspaceMembersError::ManifestMissing(path.clone(), e))?;
|
.map_err(errors::WorkspaceMembersError::ManifestMissing)?;
|
||||||
let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
|
let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
|
||||||
errors::WorkspaceMembersError::ManifestDeser(path.clone(), Box::new(e))
|
errors::WorkspaceMembersError::ManifestDeser(path.clone(), Box::new(e))
|
||||||
})?;
|
})?;
|
||||||
Ok((path, manifest))
|
|
||||||
})
|
yield Ok((path, manifest));
|
||||||
.collect::<Result<_, _>>()
|
}
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Gets all matching paths in a directory
|
||||||
|
#[deprecated(
|
||||||
|
since = "0.5.0-rc.13",
|
||||||
|
note = "use `matching_globs` instead, which does not have the old behaviour of including whole directories by their name (`src` instead of `src/**`)"
|
||||||
|
)]
|
||||||
|
#[instrument(ret, level = "trace")]
|
||||||
|
pub async fn matching_globs_old_behaviour<
|
||||||
|
'a,
|
||||||
|
P: AsRef<Path> + Debug,
|
||||||
|
I: IntoIterator<Item = &'a str> + Debug,
|
||||||
|
>(
|
||||||
|
dir: P,
|
||||||
|
globs: I,
|
||||||
|
relative: bool,
|
||||||
|
) -> Result<HashSet<PathBuf>, errors::MatchingGlobsError> {
|
||||||
|
let (negative_globs, positive_globs) = globs
|
||||||
|
.into_iter()
|
||||||
|
.partition::<Vec<_>, _>(|glob| glob.starts_with('!'));
|
||||||
|
|
||||||
|
let negative_globs = wax::any(
|
||||||
|
negative_globs
|
||||||
|
.into_iter()
|
||||||
|
.map(|glob| wax::Glob::new(&glob[1..]))
|
||||||
|
.collect::<Result<Vec<_>, _>>()?,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let (positive_globs, file_names) = positive_globs
|
||||||
|
.into_iter()
|
||||||
|
// only globs we can be sure of (maintaining compatibility with old "only file/dir name" system)
|
||||||
|
.partition::<Vec<_>, _>(|glob| glob.contains('/'));
|
||||||
|
let file_names = file_names.into_iter().collect::<HashSet<_>>();
|
||||||
|
|
||||||
|
let positive_globs = wax::any(
|
||||||
|
positive_globs
|
||||||
|
.into_iter()
|
||||||
|
.map(wax::Glob::new)
|
||||||
|
.collect::<Result<Vec<_>, _>>()?,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let mut read_dirs = vec![(fs::read_dir(dir.as_ref().to_path_buf()).await?, false)];
|
||||||
|
let mut paths = HashSet::new();
|
||||||
|
|
||||||
|
let mut is_root = true;
|
||||||
|
|
||||||
|
while let Some((mut read_dir, is_entire_dir_included)) = read_dirs.pop() {
|
||||||
|
while let Some(entry) = read_dir.next_entry().await? {
|
||||||
|
let path = entry.path();
|
||||||
|
let relative_path = path.strip_prefix(dir.as_ref()).unwrap();
|
||||||
|
let file_name = path.file_name().unwrap();
|
||||||
|
let is_filename_match =
|
||||||
|
is_root && file_name.to_str().is_some_and(|s| file_names.contains(s));
|
||||||
|
|
||||||
|
if entry.file_type().await?.is_dir() {
|
||||||
|
read_dirs.push((
|
||||||
|
fs::read_dir(&path).await?,
|
||||||
|
is_entire_dir_included || is_filename_match,
|
||||||
|
));
|
||||||
|
if is_filename_match {
|
||||||
|
tracing::warn!("directory name usage found for {}. this is deprecated and will be removed in the future", path.display());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (is_entire_dir_included || is_filename_match)
|
||||||
|
|| (positive_globs.is_match(relative_path)
|
||||||
|
&& !negative_globs.is_match(relative_path))
|
||||||
|
{
|
||||||
|
paths.insert(if relative {
|
||||||
|
relative_path.to_path_buf()
|
||||||
|
} else {
|
||||||
|
path.to_path_buf()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
is_root = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(paths)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gets all matching paths in a directory
|
||||||
|
#[instrument(ret, level = "trace")]
|
||||||
|
pub async fn matching_globs<'a, P: AsRef<Path> + Debug, I: IntoIterator<Item = &'a str> + Debug>(
|
||||||
|
dir: P,
|
||||||
|
globs: I,
|
||||||
|
relative: bool,
|
||||||
|
can_ref_self: bool,
|
||||||
|
) -> Result<HashSet<PathBuf>, errors::MatchingGlobsError> {
|
||||||
|
let (negative_globs, mut positive_globs): (HashSet<&str>, _) =
|
||||||
|
globs.into_iter().partition(|glob| glob.starts_with('!'));
|
||||||
|
|
||||||
|
let include_self = positive_globs.remove(".") && can_ref_self;
|
||||||
|
|
||||||
|
let negative_globs = wax::any(
|
||||||
|
negative_globs
|
||||||
|
.into_iter()
|
||||||
|
.map(|glob| wax::Glob::new(&glob[1..]))
|
||||||
|
.collect::<Result<Vec<_>, _>>()?,
|
||||||
|
)?;
|
||||||
|
let positive_globs = wax::any(
|
||||||
|
positive_globs
|
||||||
|
.into_iter()
|
||||||
|
.map(wax::Glob::new)
|
||||||
|
.collect::<Result<Vec<_>, _>>()?,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let mut read_dirs = vec![fs::read_dir(dir.as_ref().to_path_buf()).await?];
|
||||||
|
let mut paths = HashSet::new();
|
||||||
|
|
||||||
|
if include_self {
|
||||||
|
paths.insert(if relative {
|
||||||
|
PathBuf::new()
|
||||||
|
} else {
|
||||||
|
dir.as_ref().to_path_buf()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
while let Some(mut read_dir) = read_dirs.pop() {
|
||||||
|
while let Some(entry) = read_dir.next_entry().await? {
|
||||||
|
let path = entry.path();
|
||||||
|
if entry.file_type().await?.is_dir() {
|
||||||
|
read_dirs.push(fs::read_dir(&path).await?);
|
||||||
|
}
|
||||||
|
|
||||||
|
let relative_path = path.strip_prefix(dir.as_ref()).unwrap();
|
||||||
|
|
||||||
|
if positive_globs.is_match(relative_path) && !negative_globs.is_match(relative_path) {
|
||||||
|
paths.insert(if relative {
|
||||||
|
relative_path.to_path_buf()
|
||||||
|
} else {
|
||||||
|
path.to_path_buf()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(paths)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Refreshes the sources asynchronously
|
||||||
|
pub async fn refresh_sources<I: Iterator<Item = PackageSources>>(
|
||||||
|
project: &Project,
|
||||||
|
sources: I,
|
||||||
|
refreshed_sources: &mut HashSet<PackageSources>,
|
||||||
|
) -> Result<(), Box<source::errors::RefreshError>> {
|
||||||
|
try_join_all(sources.map(|source| {
|
||||||
|
let needs_refresh = refreshed_sources.insert(source.clone());
|
||||||
|
async move {
|
||||||
|
if needs_refresh {
|
||||||
|
source.refresh(project).await.map_err(Box::new)
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
.map(|_| ())
|
||||||
|
}
|
||||||
|
|
||||||
/// Errors that can occur when using the pesde library
|
/// Errors that can occur when using the pesde library
|
||||||
pub mod errors {
|
pub mod errors {
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
@ -270,8 +438,8 @@ pub mod errors {
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum WorkspaceMembersError {
|
pub enum WorkspaceMembersError {
|
||||||
/// The manifest file could not be found
|
/// The manifest file could not be found
|
||||||
#[error("missing manifest file at {0}")]
|
#[error("missing manifest file")]
|
||||||
ManifestMissing(PathBuf, #[source] std::io::Error),
|
ManifestMissing(#[source] std::io::Error),
|
||||||
|
|
||||||
/// An error occurred deserializing the manifest file
|
/// An error occurred deserializing the manifest file
|
||||||
#[error("error deserializing manifest file at {0}")]
|
#[error("error deserializing manifest file at {0}")]
|
||||||
|
@ -281,12 +449,21 @@ pub mod errors {
|
||||||
#[error("error interacting with the filesystem")]
|
#[error("error interacting with the filesystem")]
|
||||||
Io(#[from] std::io::Error),
|
Io(#[from] std::io::Error),
|
||||||
|
|
||||||
/// An invalid glob pattern was found
|
|
||||||
#[error("invalid glob pattern")]
|
|
||||||
Glob(#[from] glob::PatternError),
|
|
||||||
|
|
||||||
/// An error occurred while globbing
|
/// An error occurred while globbing
|
||||||
#[error("error globbing")]
|
#[error("error globbing")]
|
||||||
Globbing(#[from] glob::GlobError),
|
Globbing(#[from] MatchingGlobsError),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Errors that can occur when finding matching globs
|
||||||
|
#[derive(Debug, Error)]
|
||||||
|
#[non_exhaustive]
|
||||||
|
pub enum MatchingGlobsError {
|
||||||
|
/// An error occurred interacting with the filesystem
|
||||||
|
#[error("error interacting with the filesystem")]
|
||||||
|
Io(#[from] std::io::Error),
|
||||||
|
|
||||||
|
/// An error occurred while building a glob
|
||||||
|
#[error("error building glob")]
|
||||||
|
BuildGlob(#[from] wax::BuildError),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -69,10 +69,29 @@ pub fn generate_lib_linking_module<I: IntoIterator<Item = S>, S: AsRef<str>>(
|
||||||
fn luau_style_path(path: &Path) -> String {
|
fn luau_style_path(path: &Path) -> String {
|
||||||
let path = path
|
let path = path
|
||||||
.components()
|
.components()
|
||||||
.filter_map(|ct| match ct {
|
.zip(
|
||||||
|
path.components()
|
||||||
|
.skip(1)
|
||||||
|
.map(Some)
|
||||||
|
.chain(std::iter::repeat(None)),
|
||||||
|
)
|
||||||
|
.filter_map(|(ct, next_ct)| match ct {
|
||||||
Component::CurDir => Some(".".to_string()),
|
Component::CurDir => Some(".".to_string()),
|
||||||
Component::ParentDir => Some("..".to_string()),
|
Component::ParentDir => Some("..".to_string()),
|
||||||
Component::Normal(part) => Some(format!("{}", part.to_string_lossy())),
|
Component::Normal(part) => {
|
||||||
|
let str = part.to_string_lossy();
|
||||||
|
|
||||||
|
Some(
|
||||||
|
(if next_ct.is_some() {
|
||||||
|
&str
|
||||||
|
} else {
|
||||||
|
str.strip_suffix(".luau")
|
||||||
|
.or_else(|| str.strip_suffix(".lua"))
|
||||||
|
.unwrap_or(&str)
|
||||||
|
})
|
||||||
|
.to_string(),
|
||||||
|
)
|
||||||
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
|
@ -98,10 +117,10 @@ pub fn get_lib_require_path(
|
||||||
) -> Result<String, errors::GetLibRequirePath> {
|
) -> Result<String, errors::GetLibRequirePath> {
|
||||||
let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
|
let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
|
||||||
let path = if use_new_structure {
|
let path = if use_new_structure {
|
||||||
log::debug!("using new structure for require path with {:?}", lib_file);
|
tracing::debug!("using new structure for require path with {lib_file:?}");
|
||||||
lib_file.to_path(path)
|
lib_file.to_path(path)
|
||||||
} else {
|
} else {
|
||||||
log::debug!("using old structure for require path with {:?}", lib_file);
|
tracing::debug!("using old structure for require path with {lib_file:?}");
|
||||||
path
|
path
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -126,14 +145,26 @@ pub fn get_lib_require_path(
|
||||||
|
|
||||||
let path = path
|
let path = path
|
||||||
.components()
|
.components()
|
||||||
.filter_map(|component| match component {
|
.zip(
|
||||||
|
path.components()
|
||||||
|
.skip(1)
|
||||||
|
.map(Some)
|
||||||
|
.chain(std::iter::repeat(None)),
|
||||||
|
)
|
||||||
|
.filter_map(|(component, next_comp)| match component {
|
||||||
Component::ParentDir => Some(".Parent".to_string()),
|
Component::ParentDir => Some(".Parent".to_string()),
|
||||||
Component::Normal(part) if part != "init.lua" && part != "init.luau" => {
|
Component::Normal(part) if part != "init.lua" && part != "init.luau" => {
|
||||||
|
let str = part.to_string_lossy();
|
||||||
|
|
||||||
Some(format!(
|
Some(format!(
|
||||||
"[{:?}]",
|
"[{:?}]",
|
||||||
part.to_string_lossy()
|
if next_comp.is_some() {
|
||||||
.trim_end_matches(".lua")
|
&str
|
||||||
.trim_end_matches(".luau")
|
} else {
|
||||||
|
str.strip_suffix(".luau")
|
||||||
|
.or_else(|| str.strip_suffix(".lua"))
|
||||||
|
.unwrap_or(&str)
|
||||||
|
}
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
|
@ -168,12 +199,30 @@ pub fn get_bin_require_path(
|
||||||
luau_style_path(&path)
|
luau_style_path(&path)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Generate a linking module for a script
|
||||||
|
pub fn generate_script_linking_module(require_path: &str) -> String {
|
||||||
|
format!(r#"return require({require_path})"#)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the require path for a script
|
||||||
|
pub fn get_script_require_path(
|
||||||
|
base_dir: &Path,
|
||||||
|
script_file: &RelativePathBuf,
|
||||||
|
destination_dir: &Path,
|
||||||
|
) -> String {
|
||||||
|
let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
|
||||||
|
let path = script_file.to_path(path);
|
||||||
|
|
||||||
|
luau_style_path(&path)
|
||||||
|
}
|
||||||
|
|
||||||
/// Errors for the linking module utilities
|
/// Errors for the linking module utilities
|
||||||
pub mod errors {
|
pub mod errors {
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
/// An error occurred while getting the require path for a library
|
/// An error occurred while getting the require path for a library
|
||||||
#[derive(Debug, Error)]
|
#[derive(Debug, Error)]
|
||||||
|
#[non_exhaustive]
|
||||||
pub enum GetLibRequirePath {
|
pub enum GetLibRequirePath {
|
||||||
/// The path for the RobloxPlaceKind could not be found
|
/// The path for the RobloxPlaceKind could not be found
|
||||||
#[error("could not find the path for the RobloxPlaceKind {0}")]
|
#[error("could not find the path for the RobloxPlaceKind {0}")]
|
||||||
|
|
|
@ -1,254 +1,363 @@
|
||||||
use crate::{
|
use crate::{
|
||||||
linking::generator::get_file_types,
|
linking::generator::get_file_types,
|
||||||
lockfile::DownloadedGraph,
|
lockfile::{DownloadedDependencyGraphNode, DownloadedGraph},
|
||||||
|
manifest::Manifest,
|
||||||
names::PackageNames,
|
names::PackageNames,
|
||||||
scripts::{execute_script, ScriptName},
|
scripts::{execute_script, ScriptName},
|
||||||
source::{fs::store_in_cas, traits::PackageRef, version_id::VersionId},
|
source::{
|
||||||
Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME,
|
fs::{cas_path, store_in_cas},
|
||||||
|
traits::PackageRef,
|
||||||
|
version_id::VersionId,
|
||||||
|
},
|
||||||
|
Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
|
||||||
};
|
};
|
||||||
|
use fs_err::tokio as fs;
|
||||||
|
use futures::future::try_join_all;
|
||||||
use std::{
|
use std::{
|
||||||
collections::BTreeMap,
|
collections::HashMap,
|
||||||
ffi::OsStr,
|
ffi::OsStr,
|
||||||
fs::create_dir_all,
|
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
|
sync::Arc,
|
||||||
};
|
};
|
||||||
|
use tokio::task::spawn_blocking;
|
||||||
|
use tracing::{instrument, Instrument};
|
||||||
|
|
||||||
/// Generates linking modules for a project
|
/// Generates linking modules for a project
|
||||||
pub mod generator;
|
pub mod generator;
|
||||||
|
|
||||||
fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
|
async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
|
||||||
let p = path.as_ref();
|
let p = path.as_ref();
|
||||||
create_dir_all(p)?;
|
fs::create_dir_all(p).await?;
|
||||||
p.canonicalize()
|
p.canonicalize()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
|
async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
|
||||||
let cas_path = store_in_cas(cas_dir, contents.as_bytes())?.1;
|
let hash = store_in_cas(cas_dir, contents.as_bytes(), |_| async { Ok(()) }).await?;
|
||||||
|
|
||||||
std::fs::hard_link(cas_path, destination)
|
match fs::remove_file(&destination).await {
|
||||||
|
Ok(_) => {}
|
||||||
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
|
||||||
|
Err(e) => return Err(e),
|
||||||
|
};
|
||||||
|
|
||||||
|
fs::hard_link(cas_path(&hash, cas_dir), destination).await
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Project {
|
impl Project {
|
||||||
/// Links the dependencies of the project
|
/// Links the dependencies of the project
|
||||||
pub fn link_dependencies(&self, graph: &DownloadedGraph) -> Result<(), errors::LinkingError> {
|
#[instrument(skip(self, graph), level = "debug")]
|
||||||
let manifest = self.deser_manifest()?;
|
pub async fn link_dependencies(
|
||||||
|
&self,
|
||||||
|
graph: &DownloadedGraph,
|
||||||
|
with_types: bool,
|
||||||
|
) -> Result<(), errors::LinkingError> {
|
||||||
|
let manifest = self.deser_manifest().await?;
|
||||||
|
let manifest_target_kind = manifest.target.kind();
|
||||||
|
let manifest = Arc::new(manifest);
|
||||||
|
|
||||||
let mut package_types = BTreeMap::<&PackageNames, BTreeMap<&VersionId, Vec<String>>>::new();
|
// step 1. link all non-wally packages (and their dependencies) temporarily without types
|
||||||
|
// we do this separately to allow the required tools for the scripts to be installed
|
||||||
|
self.link(graph, &manifest, &Arc::new(Default::default()), false)
|
||||||
|
.await?;
|
||||||
|
|
||||||
for (name, versions) in graph {
|
if !with_types {
|
||||||
for (version_id, node) in versions {
|
return Ok(());
|
||||||
let Some(lib_file) = node.target.lib_path() else {
|
|
||||||
continue;
|
|
||||||
};
|
|
||||||
|
|
||||||
let container_folder = node.node.container_folder(
|
|
||||||
&self
|
|
||||||
.package_dir()
|
|
||||||
.join(
|
|
||||||
manifest
|
|
||||||
.target
|
|
||||||
.kind()
|
|
||||||
.packages_folder(&node.node.pkg_ref.target_kind()),
|
|
||||||
)
|
|
||||||
.join(PACKAGES_CONTAINER_NAME),
|
|
||||||
name,
|
|
||||||
version_id.version(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
|
|
||||||
let lib_file = lib_file.to_path(&container_folder);
|
|
||||||
|
|
||||||
let contents = match std::fs::read_to_string(&lib_file) {
|
|
||||||
Ok(contents) => contents,
|
|
||||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
|
||||||
return Err(errors::LinkingError::LibFileNotFound(
|
|
||||||
lib_file.display().to_string(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
Err(e) => return Err(e.into()),
|
|
||||||
};
|
|
||||||
|
|
||||||
let types = match get_file_types(&contents) {
|
|
||||||
Ok(types) => types,
|
|
||||||
Err(e) => {
|
|
||||||
return Err(errors::LinkingError::FullMoon(
|
|
||||||
lib_file.display().to_string(),
|
|
||||||
e,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
log::debug!("{name}@{version_id} has {} exported types", types.len());
|
|
||||||
|
|
||||||
types
|
|
||||||
} else {
|
|
||||||
vec![]
|
|
||||||
};
|
|
||||||
|
|
||||||
package_types
|
|
||||||
.entry(name)
|
|
||||||
.or_default()
|
|
||||||
.insert(version_id, types);
|
|
||||||
|
|
||||||
if let Some(build_files) = Some(&node.target)
|
|
||||||
.filter(|_| !node.node.pkg_ref.like_wally())
|
|
||||||
.and_then(|t| t.build_files())
|
|
||||||
{
|
|
||||||
let script_name = ScriptName::RobloxSyncConfigGenerator.to_string();
|
|
||||||
|
|
||||||
let Some(script_path) = manifest.scripts.get(&script_name) else {
|
|
||||||
log::warn!("not having a `{script_name}` script in the manifest might cause issues with Roblox linking");
|
|
||||||
continue;
|
|
||||||
};
|
|
||||||
|
|
||||||
execute_script(
|
|
||||||
ScriptName::RobloxSyncConfigGenerator,
|
|
||||||
&script_path.to_path(self.package_dir()),
|
|
||||||
std::iter::once(container_folder.as_os_str())
|
|
||||||
.chain(build_files.iter().map(OsStr::new)),
|
|
||||||
self,
|
|
||||||
false,
|
|
||||||
)
|
|
||||||
.map_err(|e| {
|
|
||||||
errors::LinkingError::GenerateRobloxSyncConfig(
|
|
||||||
container_folder.display().to_string(),
|
|
||||||
e,
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
for (name, versions) in graph {
|
// step 2. extract the types from libraries, prepare Roblox packages for syncing
|
||||||
for (version_id, node) in versions {
|
let roblox_sync_config_gen_script = manifest
|
||||||
let (node_container_folder, node_packages_folder) = {
|
.scripts
|
||||||
let base_folder = create_and_canonicalize(
|
.get(&ScriptName::RobloxSyncConfigGenerator.to_string());
|
||||||
self.package_dir().join(
|
|
||||||
manifest
|
let package_types = try_join_all(graph.iter().map(|(name, versions)| async move {
|
||||||
.target
|
Ok::<_, errors::LinkingError>((
|
||||||
.kind()
|
name,
|
||||||
.packages_folder(&node.node.pkg_ref.target_kind()),
|
try_join_all(versions.iter().map(|(version_id, node)| async move {
|
||||||
),
|
let Some(lib_file) = node.target.lib_path() else {
|
||||||
)?;
|
return Ok((version_id, vec![]));
|
||||||
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
|
};
|
||||||
|
|
||||||
let container_folder = node.node.container_folder(
|
let container_folder = node.node.container_folder(
|
||||||
&packages_container_folder,
|
&self
|
||||||
|
.package_dir()
|
||||||
|
.join(manifest_target_kind.packages_folder(version_id.target()))
|
||||||
|
.join(PACKAGES_CONTAINER_NAME),
|
||||||
name,
|
name,
|
||||||
version_id.version(),
|
version_id.version(),
|
||||||
);
|
);
|
||||||
|
|
||||||
if let Some((alias, _)) = &node.node.direct.as_ref() {
|
let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
|
||||||
if let Some((lib_file, types)) =
|
let lib_file = lib_file.to_path(&container_folder);
|
||||||
node.target.lib_path().and_then(|lib_file| {
|
|
||||||
package_types
|
let contents = match fs::read_to_string(&lib_file).await {
|
||||||
.get(name)
|
Ok(contents) => contents,
|
||||||
.and_then(|v| v.get(version_id))
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||||
.map(|types| (lib_file, types))
|
return Err(errors::LinkingError::LibFileNotFound(
|
||||||
})
|
lib_file.display().to_string(),
|
||||||
{
|
));
|
||||||
write_cas(
|
}
|
||||||
base_folder.join(format!("{alias}.luau")),
|
Err(e) => return Err(e.into()),
|
||||||
self.cas_dir(),
|
|
||||||
&generator::generate_lib_linking_module(
|
|
||||||
&generator::get_lib_require_path(
|
|
||||||
&node.target.kind(),
|
|
||||||
&base_folder,
|
|
||||||
lib_file,
|
|
||||||
&container_folder,
|
|
||||||
node.node.pkg_ref.use_new_structure(),
|
|
||||||
&base_folder,
|
|
||||||
container_folder.strip_prefix(&base_folder).unwrap(),
|
|
||||||
&manifest,
|
|
||||||
)?,
|
|
||||||
types,
|
|
||||||
),
|
|
||||||
)?;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(bin_file) = node.target.bin_path() {
|
let types = match spawn_blocking(move || get_file_types(&contents))
|
||||||
write_cas(
|
.await
|
||||||
base_folder.join(format!("{alias}.bin.luau")),
|
.unwrap()
|
||||||
self.cas_dir(),
|
{
|
||||||
&generator::generate_bin_linking_module(
|
Ok(types) => types,
|
||||||
&container_folder,
|
Err(e) => {
|
||||||
&generator::get_bin_require_path(
|
return Err(errors::LinkingError::FullMoon(
|
||||||
&base_folder,
|
lib_file.display().to_string(),
|
||||||
bin_file,
|
e,
|
||||||
&container_folder,
|
))
|
||||||
),
|
}
|
||||||
),
|
};
|
||||||
)?;
|
|
||||||
}
|
tracing::debug!("contains {} exported types", types.len());
|
||||||
|
|
||||||
|
types
|
||||||
|
} else {
|
||||||
|
vec![]
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(build_files) = Some(&node.target)
|
||||||
|
.filter(|_| !node.node.pkg_ref.like_wally())
|
||||||
|
.and_then(|t| t.build_files())
|
||||||
|
{
|
||||||
|
let Some(script_path) = roblox_sync_config_gen_script else {
|
||||||
|
tracing::warn!("not having a `{}` script in the manifest might cause issues with Roblox linking", ScriptName::RobloxSyncConfigGenerator);
|
||||||
|
return Ok((version_id, types));
|
||||||
|
};
|
||||||
|
|
||||||
|
execute_script(
|
||||||
|
ScriptName::RobloxSyncConfigGenerator,
|
||||||
|
&script_path.to_path(self.package_dir()),
|
||||||
|
std::iter::once(container_folder.as_os_str())
|
||||||
|
.chain(build_files.iter().map(OsStr::new)),
|
||||||
|
self,
|
||||||
|
false,
|
||||||
|
).await
|
||||||
|
.map_err(|e| {
|
||||||
|
errors::LinkingError::GenerateRobloxSyncConfig(
|
||||||
|
container_folder.display().to_string(),
|
||||||
|
e,
|
||||||
|
)
|
||||||
|
})?;
|
||||||
}
|
}
|
||||||
|
|
||||||
(container_folder, base_folder)
|
Ok((version_id, types))
|
||||||
};
|
}.instrument(tracing::debug_span!("extract types", name = name.to_string(), version_id = version_id.to_string()))))
|
||||||
|
.await?
|
||||||
|
.into_iter()
|
||||||
|
.collect::<HashMap<_, _>>(),
|
||||||
|
))
|
||||||
|
}))
|
||||||
|
.await?
|
||||||
|
.into_iter()
|
||||||
|
.collect::<HashMap<_, _>>();
|
||||||
|
|
||||||
for (dependency_name, (dependency_version_id, dependency_alias)) in
|
// step 3. link all packages (and their dependencies), this time with types
|
||||||
&node.node.dependencies
|
self.link(graph, &manifest, &Arc::new(package_types), true)
|
||||||
{
|
.await
|
||||||
let Some(dependency_node) = graph
|
}
|
||||||
.get(dependency_name)
|
|
||||||
.and_then(|v| v.get(dependency_version_id))
|
|
||||||
else {
|
|
||||||
return Err(errors::LinkingError::DependencyNotFound(
|
|
||||||
dependency_name.to_string(),
|
|
||||||
dependency_version_id.to_string(),
|
|
||||||
));
|
|
||||||
};
|
|
||||||
|
|
||||||
let Some(lib_file) = dependency_node.target.lib_path() else {
|
#[allow(clippy::too_many_arguments)]
|
||||||
continue;
|
async fn link_files(
|
||||||
};
|
&self,
|
||||||
|
base_folder: &Path,
|
||||||
|
container_folder: &Path,
|
||||||
|
root_container_folder: &Path,
|
||||||
|
relative_container_folder: &Path,
|
||||||
|
node: &DownloadedDependencyGraphNode,
|
||||||
|
name: &PackageNames,
|
||||||
|
version_id: &VersionId,
|
||||||
|
alias: &str,
|
||||||
|
package_types: &HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>,
|
||||||
|
manifest: &Manifest,
|
||||||
|
) -> Result<(), errors::LinkingError> {
|
||||||
|
static NO_TYPES: Vec<String> = Vec::new();
|
||||||
|
|
||||||
let base_folder = create_and_canonicalize(
|
if let Some(lib_file) = node.target.lib_path() {
|
||||||
self.package_dir().join(
|
let lib_module = generator::generate_lib_linking_module(
|
||||||
node.node
|
&generator::get_lib_require_path(
|
||||||
.pkg_ref
|
&node.target.kind(),
|
||||||
.target_kind()
|
base_folder,
|
||||||
.packages_folder(&dependency_node.node.pkg_ref.target_kind()),
|
lib_file,
|
||||||
),
|
container_folder,
|
||||||
)?;
|
node.node.pkg_ref.use_new_structure(),
|
||||||
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
|
root_container_folder,
|
||||||
|
relative_container_folder,
|
||||||
|
manifest,
|
||||||
|
)?,
|
||||||
|
package_types
|
||||||
|
.get(name)
|
||||||
|
.and_then(|v| v.get(version_id))
|
||||||
|
.unwrap_or(&NO_TYPES),
|
||||||
|
);
|
||||||
|
|
||||||
let container_folder = dependency_node.node.container_folder(
|
write_cas(
|
||||||
&packages_container_folder,
|
base_folder.join(format!("{alias}.luau")),
|
||||||
dependency_name,
|
self.cas_dir(),
|
||||||
dependency_version_id.version(),
|
&lib_module,
|
||||||
);
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
let linker_folder = create_and_canonicalize(
|
if let Some(bin_file) = node.target.bin_path() {
|
||||||
node_container_folder
|
let bin_module = generator::generate_bin_linking_module(
|
||||||
.join(node.node.base_folder(dependency_node.target.kind())),
|
container_folder,
|
||||||
)?;
|
&generator::get_bin_require_path(base_folder, bin_file, container_folder),
|
||||||
|
);
|
||||||
|
|
||||||
write_cas(
|
write_cas(
|
||||||
linker_folder.join(format!("{dependency_alias}.luau")),
|
base_folder.join(format!("{alias}.bin.luau")),
|
||||||
self.cas_dir(),
|
self.cas_dir(),
|
||||||
&generator::generate_lib_linking_module(
|
&bin_module,
|
||||||
&generator::get_lib_require_path(
|
)
|
||||||
&dependency_node.target.kind(),
|
.await?;
|
||||||
&linker_folder,
|
}
|
||||||
lib_file,
|
|
||||||
&container_folder,
|
if let Some(scripts) = node.target.scripts().filter(|s| !s.is_empty()) {
|
||||||
dependency_node.node.pkg_ref.use_new_structure(),
|
let scripts_base =
|
||||||
&node_packages_folder,
|
create_and_canonicalize(self.package_dir().join(SCRIPTS_LINK_FOLDER).join(alias))
|
||||||
container_folder.strip_prefix(&base_folder).unwrap(),
|
.await?;
|
||||||
&manifest,
|
|
||||||
)?,
|
for (script_name, script_path) in scripts {
|
||||||
package_types
|
let script_module =
|
||||||
.get(dependency_name)
|
generator::generate_script_linking_module(&generator::get_script_require_path(
|
||||||
.and_then(|v| v.get(dependency_version_id))
|
&scripts_base,
|
||||||
.unwrap(),
|
script_path,
|
||||||
),
|
container_folder,
|
||||||
)?;
|
));
|
||||||
}
|
|
||||||
|
write_cas(
|
||||||
|
scripts_base.join(format!("{script_name}.luau")),
|
||||||
|
self.cas_dir(),
|
||||||
|
&script_module,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async fn link(
|
||||||
|
&self,
|
||||||
|
graph: &DownloadedGraph,
|
||||||
|
manifest: &Arc<Manifest>,
|
||||||
|
package_types: &Arc<HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>>,
|
||||||
|
is_complete: bool,
|
||||||
|
) -> Result<(), errors::LinkingError> {
|
||||||
|
try_join_all(graph.iter().flat_map(|(name, versions)| {
|
||||||
|
versions.iter().map(|(version_id, node)| {
|
||||||
|
let name = name.clone();
|
||||||
|
let manifest = manifest.clone();
|
||||||
|
let package_types = package_types.clone();
|
||||||
|
|
||||||
|
let span = tracing::info_span!(
|
||||||
|
"link",
|
||||||
|
name = name.to_string(),
|
||||||
|
version_id = version_id.to_string()
|
||||||
|
);
|
||||||
|
|
||||||
|
async move {
|
||||||
|
let (node_container_folder, node_packages_folder) = {
|
||||||
|
let base_folder = create_and_canonicalize(
|
||||||
|
self.package_dir()
|
||||||
|
.join(manifest.target.kind().packages_folder(version_id.target())),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
|
||||||
|
|
||||||
|
let container_folder = node.node.container_folder(
|
||||||
|
&packages_container_folder,
|
||||||
|
&name,
|
||||||
|
version_id.version(),
|
||||||
|
);
|
||||||
|
|
||||||
|
if let Some((alias, _, _)) = &node.node.direct {
|
||||||
|
self.link_files(
|
||||||
|
&base_folder,
|
||||||
|
&container_folder,
|
||||||
|
&base_folder,
|
||||||
|
container_folder.strip_prefix(&base_folder).unwrap(),
|
||||||
|
node,
|
||||||
|
&name,
|
||||||
|
version_id,
|
||||||
|
alias,
|
||||||
|
&package_types,
|
||||||
|
&manifest,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
(container_folder, base_folder)
|
||||||
|
};
|
||||||
|
|
||||||
|
for (dependency_name, (dependency_version_id, dependency_alias)) in
|
||||||
|
&node.node.dependencies
|
||||||
|
{
|
||||||
|
let Some(dependency_node) = graph
|
||||||
|
.get(dependency_name)
|
||||||
|
.and_then(|v| v.get(dependency_version_id))
|
||||||
|
else {
|
||||||
|
if is_complete {
|
||||||
|
return Err(errors::LinkingError::DependencyNotFound(
|
||||||
|
format!("{dependency_name}@{dependency_version_id}"),
|
||||||
|
format!("{name}@{version_id}"),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
|
||||||
|
let base_folder = create_and_canonicalize(
|
||||||
|
self.package_dir().join(
|
||||||
|
version_id
|
||||||
|
.target()
|
||||||
|
.packages_folder(dependency_version_id.target()),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
|
||||||
|
|
||||||
|
let container_folder = dependency_node.node.container_folder(
|
||||||
|
&packages_container_folder,
|
||||||
|
dependency_name,
|
||||||
|
dependency_version_id.version(),
|
||||||
|
);
|
||||||
|
|
||||||
|
let linker_folder = create_and_canonicalize(
|
||||||
|
node_container_folder.join(
|
||||||
|
node.node
|
||||||
|
.base_folder(version_id, dependency_node.target.kind()),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
self.link_files(
|
||||||
|
&linker_folder,
|
||||||
|
&container_folder,
|
||||||
|
&node_packages_folder,
|
||||||
|
container_folder.strip_prefix(&base_folder).unwrap(),
|
||||||
|
dependency_node,
|
||||||
|
dependency_name,
|
||||||
|
dependency_version_id,
|
||||||
|
dependency_alias,
|
||||||
|
&package_types,
|
||||||
|
&manifest,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
.instrument(span)
|
||||||
|
})
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
.map(|_| ())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Errors that can occur while linking dependencies
|
/// Errors that can occur while linking dependencies
|
||||||
|
@ -268,7 +377,7 @@ pub mod errors {
|
||||||
Io(#[from] std::io::Error),
|
Io(#[from] std::io::Error),
|
||||||
|
|
||||||
/// A dependency was not found
|
/// A dependency was not found
|
||||||
#[error("dependency not found: {0}@{1}")]
|
#[error("dependency `{0}` of `{1}` not found")]
|
||||||
DependencyNotFound(String, String),
|
DependencyNotFound(String, String),
|
||||||
|
|
||||||
/// The library file was not found
|
/// The library file was not found
|
||||||
|
|
|
@ -14,7 +14,7 @@ use relative_path::RelativePathBuf;
|
||||||
use semver::Version;
|
use semver::Version;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::{
|
use std::{
|
||||||
collections::{btree_map::Entry, BTreeMap},
|
collections::BTreeMap,
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -24,22 +24,25 @@ pub type Graph<Node> = BTreeMap<PackageNames, BTreeMap<VersionId, Node>>;
|
||||||
/// A dependency graph node
|
/// A dependency graph node
|
||||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||||
pub struct DependencyGraphNode {
|
pub struct DependencyGraphNode {
|
||||||
/// The alias and specifiers for the dependency, if it is a direct dependency (i.e. used by the current project)
|
/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
|
||||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
pub direct: Option<(String, DependencySpecifiers)>,
|
pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
|
||||||
/// The dependencies of the package
|
/// The dependencies of the package
|
||||||
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
||||||
pub dependencies: BTreeMap<PackageNames, (VersionId, String)>,
|
pub dependencies: BTreeMap<PackageNames, (VersionId, String)>,
|
||||||
/// The type of the dependency
|
/// The resolved (transformed, for example Peer -> Standard) type of the dependency
|
||||||
pub ty: DependencyType,
|
pub resolved_ty: DependencyType,
|
||||||
|
/// Whether the resolved type should be Peer if this isn't depended on
|
||||||
|
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
|
||||||
|
pub is_peer: bool,
|
||||||
/// The package reference
|
/// The package reference
|
||||||
pub pkg_ref: PackageRefs,
|
pub pkg_ref: PackageRefs,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DependencyGraphNode {
|
impl DependencyGraphNode {
|
||||||
pub(crate) fn base_folder(&self, project_target: TargetKind) -> String {
|
pub(crate) fn base_folder(&self, version_id: &VersionId, project_target: TargetKind) -> String {
|
||||||
if self.pkg_ref.use_new_structure() {
|
if self.pkg_ref.use_new_structure() {
|
||||||
self.pkg_ref.target_kind().packages_folder(&project_target)
|
version_id.target().packages_folder(&project_target)
|
||||||
} else {
|
} else {
|
||||||
"..".to_string()
|
"..".to_string()
|
||||||
}
|
}
|
||||||
|
@ -52,6 +55,18 @@ impl DependencyGraphNode {
|
||||||
name: &PackageNames,
|
name: &PackageNames,
|
||||||
version: &Version,
|
version: &Version,
|
||||||
) -> PathBuf {
|
) -> PathBuf {
|
||||||
|
if self.pkg_ref.like_wally() {
|
||||||
|
return path
|
||||||
|
.as_ref()
|
||||||
|
.join(format!(
|
||||||
|
"{}_{}@{}",
|
||||||
|
name.as_str().0,
|
||||||
|
name.as_str().1,
|
||||||
|
version
|
||||||
|
))
|
||||||
|
.join(name.as_str().1);
|
||||||
|
}
|
||||||
|
|
||||||
path.as_ref()
|
path.as_ref()
|
||||||
.join(name.escaped())
|
.join(name.escaped())
|
||||||
.join(version.to_string())
|
.join(version.to_string())
|
||||||
|
@ -62,45 +77,6 @@ impl DependencyGraphNode {
|
||||||
/// A graph of `DependencyGraphNode`s
|
/// A graph of `DependencyGraphNode`s
|
||||||
pub type DependencyGraph = Graph<DependencyGraphNode>;
|
pub type DependencyGraph = Graph<DependencyGraphNode>;
|
||||||
|
|
||||||
pub(crate) fn insert_node(
|
|
||||||
graph: &mut DependencyGraph,
|
|
||||||
name: PackageNames,
|
|
||||||
version: VersionId,
|
|
||||||
mut node: DependencyGraphNode,
|
|
||||||
is_top_level: bool,
|
|
||||||
) {
|
|
||||||
if !is_top_level && node.direct.take().is_some() {
|
|
||||||
log::debug!(
|
|
||||||
"tried to insert {name}@{version} as direct dependency from a non top-level context",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
match graph
|
|
||||||
.entry(name.clone())
|
|
||||||
.or_default()
|
|
||||||
.entry(version.clone())
|
|
||||||
{
|
|
||||||
Entry::Vacant(entry) => {
|
|
||||||
entry.insert(node);
|
|
||||||
}
|
|
||||||
Entry::Occupied(existing) => {
|
|
||||||
let current_node = existing.into_mut();
|
|
||||||
|
|
||||||
match (¤t_node.direct, &node.direct) {
|
|
||||||
(Some(_), Some(_)) => {
|
|
||||||
log::warn!("duplicate direct dependency for {name}@{version}",);
|
|
||||||
}
|
|
||||||
|
|
||||||
(None, Some(_)) => {
|
|
||||||
current_node.direct = node.direct;
|
|
||||||
}
|
|
||||||
|
|
||||||
(_, _) => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
|
/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
|
||||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||||
pub struct DownloadedDependencyGraphNode {
|
pub struct DownloadedDependencyGraphNode {
|
||||||
|
|
282
src/main.rs
|
@ -1,32 +1,37 @@
|
||||||
|
#[cfg(feature = "version-management")]
|
||||||
|
use crate::cli::version::{check_for_updates, get_or_download_version, TagInfo};
|
||||||
|
use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Parser;
|
use clap::{builder::styling::AnsiColor, Parser};
|
||||||
use colored::Colorize;
|
use fs_err::tokio as fs;
|
||||||
use indicatif::MultiProgress;
|
use pesde::{matching_globs, AuthConfig, Project, MANIFEST_FILE_NAME};
|
||||||
use indicatif_log_bridge::LogWrapper;
|
|
||||||
use pesde::{AuthConfig, Project, MANIFEST_FILE_NAME};
|
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashSet,
|
collections::HashSet,
|
||||||
fs::create_dir_all,
|
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
};
|
};
|
||||||
|
use tempfile::NamedTempFile;
|
||||||
#[cfg(feature = "version-management")]
|
use tracing::instrument;
|
||||||
use crate::cli::version::{
|
use tracing_indicatif::{filter::IndicatifFilter, IndicatifLayer};
|
||||||
check_for_updates, current_version, get_or_download_version, max_installed_version,
|
use tracing_subscriber::{
|
||||||
};
|
filter::LevelFilter, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer,
|
||||||
use crate::cli::{
|
|
||||||
auth::get_token, config::read_config, home_dir, repos::update_repo_dependencies, HOME_DIR,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
mod cli;
|
mod cli;
|
||||||
pub mod util;
|
pub mod util;
|
||||||
|
|
||||||
|
const STYLES: clap::builder::Styles = clap::builder::Styles::styled()
|
||||||
|
.header(AnsiColor::Yellow.on_default().underline())
|
||||||
|
.usage(AnsiColor::Yellow.on_default().underline())
|
||||||
|
.literal(AnsiColor::Green.on_default().bold())
|
||||||
|
.placeholder(AnsiColor::Cyan.on_default());
|
||||||
|
|
||||||
#[derive(Parser, Debug)]
|
#[derive(Parser, Debug)]
|
||||||
#[clap(
|
#[clap(
|
||||||
version,
|
version,
|
||||||
about = "A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune"
|
about = "A package manager for the Luau programming language",
|
||||||
|
long_about = "A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune"
|
||||||
)]
|
)]
|
||||||
#[command(disable_version_flag = true)]
|
#[command(disable_version_flag = true, styles = STYLES)]
|
||||||
struct Cli {
|
struct Cli {
|
||||||
/// Print version
|
/// Print version
|
||||||
#[arg(short = 'v', short_alias = 'V', long, action = clap::builder::ArgAction::Version)]
|
#[arg(short = 'v', short_alias = 'V', long, action = clap::builder::ArgAction::Version)]
|
||||||
|
@ -36,40 +41,54 @@ struct Cli {
|
||||||
subcommand: cli::commands::Subcommand,
|
subcommand: cli::commands::Subcommand,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(windows)]
|
#[instrument(level = "trace")]
|
||||||
fn get_root(path: &std::path::Path) -> PathBuf {
|
async fn get_linkable_dir(path: &Path) -> PathBuf {
|
||||||
match path.components().next().unwrap() {
|
let mut curr_path = PathBuf::new();
|
||||||
std::path::Component::Prefix(prefix) => {
|
let file_to_try = NamedTempFile::new_in(path).expect("failed to create temporary file");
|
||||||
let mut string = prefix.as_os_str().to_string_lossy().to_string();
|
|
||||||
if string.ends_with(':') {
|
let temp_path = tempfile::Builder::new()
|
||||||
string.push(std::path::MAIN_SEPARATOR);
|
.make(|_| Ok(()))
|
||||||
|
.expect("failed to create temporary file")
|
||||||
|
.into_temp_path();
|
||||||
|
let temp_file_name = temp_path.file_name().expect("failed to get file name");
|
||||||
|
|
||||||
|
// C: and \ are different components on Windows
|
||||||
|
#[cfg(windows)]
|
||||||
|
let components = path.components().map(|c| {
|
||||||
|
let mut path = c.as_os_str().to_os_string();
|
||||||
|
if let std::path::Component::Prefix(_) = c {
|
||||||
|
path.push(std::path::MAIN_SEPARATOR_STR);
|
||||||
|
}
|
||||||
|
|
||||||
|
path
|
||||||
|
});
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let components = path.components().map(|c| c.as_os_str().to_os_string());
|
||||||
|
|
||||||
|
for component in components {
|
||||||
|
curr_path.push(component);
|
||||||
|
|
||||||
|
let try_path = curr_path.join(temp_file_name);
|
||||||
|
|
||||||
|
if fs::hard_link(file_to_try.path(), &try_path).await.is_ok() {
|
||||||
|
if let Err(err) = fs::remove_file(&try_path).await {
|
||||||
|
tracing::warn!(
|
||||||
|
"failed to remove temporary file at {}: {err}",
|
||||||
|
try_path.display()
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::path::PathBuf::from(&string)
|
return curr_path;
|
||||||
}
|
}
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(unix)]
|
|
||||||
fn get_root(path: &std::path::Path) -> PathBuf {
|
|
||||||
use std::os::unix::fs::MetadataExt;
|
|
||||||
|
|
||||||
let path = std::fs::canonicalize(path).unwrap();
|
|
||||||
let mut current = path.as_path();
|
|
||||||
|
|
||||||
while let Some(parent) = current.parent() {
|
|
||||||
if std::fs::metadata(parent).unwrap().dev() != std::fs::metadata(current).unwrap().dev() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
current = parent;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
current.to_path_buf()
|
panic!(
|
||||||
|
"couldn't find a linkable directory for any point in {}",
|
||||||
|
curr_path.display()
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run() -> anyhow::Result<()> {
|
async fn run() -> anyhow::Result<()> {
|
||||||
let cwd = std::env::current_dir().expect("failed to get current working directory");
|
let cwd = std::env::current_dir().expect("failed to get current working directory");
|
||||||
|
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
|
@ -99,7 +118,12 @@ fn run() -> anyhow::Result<()> {
|
||||||
// on unix systems
|
// on unix systems
|
||||||
let status = std::process::Command::new("lune")
|
let status = std::process::Command::new("lune")
|
||||||
.arg("run")
|
.arg("run")
|
||||||
.arg(exe.with_extension(""))
|
.arg(
|
||||||
|
exe.parent()
|
||||||
|
.map(|p| p.join(".impl").join(exe.file_name().unwrap()))
|
||||||
|
.unwrap_or(exe)
|
||||||
|
.with_extension("luau"),
|
||||||
|
)
|
||||||
.arg("--")
|
.arg("--")
|
||||||
.args(std::env::args_os().skip(1))
|
.args(std::env::args_os().skip(1))
|
||||||
.current_dir(cwd)
|
.current_dir(cwd)
|
||||||
|
@ -109,13 +133,47 @@ fn run() -> anyhow::Result<()> {
|
||||||
std::process::exit(status.code().unwrap());
|
std::process::exit(status.code().unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let indicatif_layer = IndicatifLayer::new().with_filter(IndicatifFilter::new(false));
|
||||||
|
|
||||||
|
let tracing_env_filter = EnvFilter::builder()
|
||||||
|
.with_default_directive(LevelFilter::INFO.into())
|
||||||
|
.from_env_lossy()
|
||||||
|
.add_directive("reqwest=info".parse().unwrap())
|
||||||
|
.add_directive("rustls=info".parse().unwrap())
|
||||||
|
.add_directive("tokio_util=info".parse().unwrap())
|
||||||
|
.add_directive("goblin=info".parse().unwrap())
|
||||||
|
.add_directive("tower=info".parse().unwrap())
|
||||||
|
.add_directive("hyper=info".parse().unwrap())
|
||||||
|
.add_directive("h2=info".parse().unwrap());
|
||||||
|
|
||||||
|
let fmt_layer =
|
||||||
|
tracing_subscriber::fmt::layer().with_writer(indicatif_layer.inner().get_stderr_writer());
|
||||||
|
|
||||||
|
#[cfg(debug_assertions)]
|
||||||
|
let fmt_layer = fmt_layer.with_timer(tracing_subscriber::fmt::time::uptime());
|
||||||
|
|
||||||
|
#[cfg(not(debug_assertions))]
|
||||||
|
let fmt_layer = fmt_layer
|
||||||
|
.pretty()
|
||||||
|
.with_timer(())
|
||||||
|
.with_line_number(false)
|
||||||
|
.with_file(false)
|
||||||
|
.with_target(false);
|
||||||
|
|
||||||
|
tracing_subscriber::registry()
|
||||||
|
.with(tracing_env_filter)
|
||||||
|
.with(fmt_layer)
|
||||||
|
.with(indicatif_layer)
|
||||||
|
.init();
|
||||||
|
|
||||||
let (project_root_dir, project_workspace_dir) = 'finder: {
|
let (project_root_dir, project_workspace_dir) = 'finder: {
|
||||||
let mut current_path = Some(cwd.clone());
|
let mut current_path = Some(cwd.clone());
|
||||||
let mut project_root = None::<PathBuf>;
|
let mut project_root = None::<PathBuf>;
|
||||||
let mut workspace_dir = None::<PathBuf>;
|
let mut workspace_dir = None::<PathBuf>;
|
||||||
|
|
||||||
fn get_workspace_members(path: &Path) -> anyhow::Result<HashSet<PathBuf>> {
|
async fn get_workspace_members(path: &Path) -> anyhow::Result<HashSet<PathBuf>> {
|
||||||
let manifest = std::fs::read_to_string(path.join(MANIFEST_FILE_NAME))
|
let manifest = fs::read_to_string(path.join(MANIFEST_FILE_NAME))
|
||||||
|
.await
|
||||||
.context("failed to read manifest")?;
|
.context("failed to read manifest")?;
|
||||||
let manifest: pesde::manifest::Manifest =
|
let manifest: pesde::manifest::Manifest =
|
||||||
toml::from_str(&manifest).context("failed to parse manifest")?;
|
toml::from_str(&manifest).context("failed to parse manifest")?;
|
||||||
|
@ -124,17 +182,14 @@ fn run() -> anyhow::Result<()> {
|
||||||
return Ok(HashSet::new());
|
return Ok(HashSet::new());
|
||||||
}
|
}
|
||||||
|
|
||||||
manifest
|
matching_globs(
|
||||||
.workspace_members
|
path,
|
||||||
.iter()
|
manifest.workspace_members.iter().map(|s| s.as_str()),
|
||||||
.map(|member| path.join(member))
|
false,
|
||||||
.map(|p| glob::glob(&p.to_string_lossy()))
|
false,
|
||||||
.collect::<Result<Vec<_>, _>>()
|
)
|
||||||
.context("invalid glob patterns")?
|
.await
|
||||||
.into_iter()
|
.context("failed to get workspace members")
|
||||||
.flat_map(|paths| paths.into_iter())
|
|
||||||
.collect::<Result<HashSet<_>, _>>()
|
|
||||||
.context("failed to expand glob patterns")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
while let Some(path) = current_path {
|
while let Some(path) = current_path {
|
||||||
|
@ -150,13 +205,13 @@ fn run() -> anyhow::Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
(Some(project_root), None) => {
|
(Some(project_root), None) => {
|
||||||
if get_workspace_members(&path)?.contains(project_root) {
|
if get_workspace_members(&path).await?.contains(project_root) {
|
||||||
workspace_dir = Some(path);
|
workspace_dir = Some(path);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
(None, None) => {
|
(None, None) => {
|
||||||
if get_workspace_members(&path)?.contains(&cwd) {
|
if get_workspace_members(&path).await?.contains(&cwd) {
|
||||||
// initializing a new member of a workspace
|
// initializing a new member of a workspace
|
||||||
break 'finder (cwd, Some(path));
|
break 'finder (cwd, Some(path));
|
||||||
} else {
|
} else {
|
||||||
|
@ -173,49 +228,41 @@ fn run() -> anyhow::Result<()> {
|
||||||
(project_root.unwrap_or_else(|| cwd.clone()), workspace_dir)
|
(project_root.unwrap_or_else(|| cwd.clone()), workspace_dir)
|
||||||
};
|
};
|
||||||
|
|
||||||
let multi = {
|
tracing::trace!(
|
||||||
let logger = pretty_env_logger::formatted_builder()
|
"project root: {}\nworkspace root: {}",
|
||||||
.parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"))
|
project_root_dir.display(),
|
||||||
.build();
|
project_workspace_dir
|
||||||
let multi = MultiProgress::new();
|
.as_ref()
|
||||||
|
.map_or("none".to_string(), |p| p.display().to_string())
|
||||||
|
);
|
||||||
|
|
||||||
LogWrapper::new(multi.clone(), logger).try_init().unwrap();
|
let home_dir = home_dir()?;
|
||||||
|
let data_dir = home_dir.join("data");
|
||||||
|
fs::create_dir_all(&data_dir)
|
||||||
|
.await
|
||||||
|
.expect("failed to create data directory");
|
||||||
|
|
||||||
multi
|
let cas_dir = get_linkable_dir(&project_root_dir).await.join(HOME_DIR);
|
||||||
};
|
|
||||||
|
|
||||||
let data_dir = home_dir()?.join("data");
|
let cas_dir = if cas_dir == home_dir {
|
||||||
create_dir_all(&data_dir).expect("failed to create data directory");
|
&data_dir
|
||||||
|
|
||||||
let token = get_token()?;
|
|
||||||
|
|
||||||
let home_cas_dir = data_dir.join("cas");
|
|
||||||
create_dir_all(&home_cas_dir).expect("failed to create cas directory");
|
|
||||||
let project_root = get_root(&project_root_dir);
|
|
||||||
let cas_dir = if get_root(&home_cas_dir) == project_root {
|
|
||||||
home_cas_dir
|
|
||||||
} else {
|
} else {
|
||||||
project_root.join(HOME_DIR).join("cas")
|
&cas_dir
|
||||||
};
|
}
|
||||||
|
.join("cas");
|
||||||
|
|
||||||
|
tracing::debug!("using cas dir in {}", cas_dir.display());
|
||||||
|
|
||||||
let project = Project::new(
|
let project = Project::new(
|
||||||
project_root_dir,
|
project_root_dir,
|
||||||
project_workspace_dir,
|
project_workspace_dir,
|
||||||
data_dir,
|
data_dir,
|
||||||
cas_dir,
|
cas_dir,
|
||||||
AuthConfig::new()
|
AuthConfig::new().with_tokens(get_tokens().await?.0),
|
||||||
.with_default_token(token.clone())
|
|
||||||
.with_token_overrides(read_config()?.token_overrides),
|
|
||||||
);
|
);
|
||||||
|
|
||||||
let reqwest = {
|
let reqwest = {
|
||||||
let mut headers = reqwest::header::HeaderMap::new();
|
let mut headers = reqwest::header::HeaderMap::new();
|
||||||
if let Some(token) = token {
|
|
||||||
headers.insert(
|
|
||||||
reqwest::header::AUTHORIZATION,
|
|
||||||
token.parse().context("failed to create auth header")?,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
headers.insert(
|
headers.insert(
|
||||||
reqwest::header::ACCEPT,
|
reqwest::header::ACCEPT,
|
||||||
|
@ -224,7 +271,7 @@ fn run() -> anyhow::Result<()> {
|
||||||
.context("failed to create accept header")?,
|
.context("failed to create accept header")?,
|
||||||
);
|
);
|
||||||
|
|
||||||
reqwest::blocking::Client::builder()
|
reqwest::Client::builder()
|
||||||
.user_agent(concat!(
|
.user_agent(concat!(
|
||||||
env!("CARGO_PKG_NAME"),
|
env!("CARGO_PKG_NAME"),
|
||||||
"/",
|
"/",
|
||||||
|
@ -238,22 +285,15 @@ fn run() -> anyhow::Result<()> {
|
||||||
{
|
{
|
||||||
let target_version = project
|
let target_version = project
|
||||||
.deser_manifest()
|
.deser_manifest()
|
||||||
|
.await
|
||||||
.ok()
|
.ok()
|
||||||
.and_then(|manifest| manifest.pesde_version);
|
.and_then(|manifest| manifest.pesde_version);
|
||||||
|
|
||||||
// store the current version in case it needs to be used later
|
|
||||||
get_or_download_version(&reqwest, ¤t_version())?;
|
|
||||||
|
|
||||||
let exe_path = if let Some(version) = target_version {
|
let exe_path = if let Some(version) = target_version {
|
||||||
Some(get_or_download_version(&reqwest, &version)?)
|
get_or_download_version(&reqwest, &TagInfo::Incomplete(version), false).await?
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
let exe_path = if let Some(exe_path) = exe_path {
|
|
||||||
exe_path
|
|
||||||
} else {
|
|
||||||
get_or_download_version(&reqwest, &max_installed_version()?)?
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Some(exe_path) = exe_path {
|
if let Some(exe_path) = exe_path {
|
||||||
let status = std::process::Command::new(exe_path)
|
let status = std::process::Command::new(exe_path)
|
||||||
|
@ -264,50 +304,20 @@ fn run() -> anyhow::Result<()> {
|
||||||
std::process::exit(status.code().unwrap());
|
std::process::exit(status.code().unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
display_err(check_for_updates(&reqwest), " while checking for updates");
|
display_err(
|
||||||
|
check_for_updates(&reqwest).await,
|
||||||
|
" while checking for updates",
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
display_err(
|
let cli = Cli::parse();
|
||||||
update_repo_dependencies(&project),
|
|
||||||
" while updating repository dependencies",
|
|
||||||
);
|
|
||||||
|
|
||||||
Cli::parse().subcommand.run(project, multi, reqwest)
|
cli.subcommand.run(project, reqwest).await
|
||||||
}
|
}
|
||||||
|
|
||||||
fn display_err(result: anyhow::Result<()>, prefix: &str) {
|
#[tokio::main]
|
||||||
if let Err(err) = result {
|
async fn main() {
|
||||||
eprintln!("{}: {err}\n", format!("error{prefix}").red().bold());
|
let result = run().await;
|
||||||
|
|
||||||
let cause = err.chain().skip(1).collect::<Vec<_>>();
|
|
||||||
|
|
||||||
if !cause.is_empty() {
|
|
||||||
eprintln!("{}:", "caused by".red().bold());
|
|
||||||
for err in cause {
|
|
||||||
eprintln!(" - {err}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let backtrace = err.backtrace();
|
|
||||||
match backtrace.status() {
|
|
||||||
std::backtrace::BacktraceStatus::Disabled => {
|
|
||||||
eprintln!(
|
|
||||||
"\n{}: set RUST_BACKTRACE=1 for a backtrace",
|
|
||||||
"help".yellow().bold()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
std::backtrace::BacktraceStatus::Captured => {
|
|
||||||
eprintln!("\n{}:\n{backtrace}", "backtrace".yellow().bold());
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
eprintln!("\n{}: not captured", "backtrace".yellow().bold());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
let result = run();
|
|
||||||
let is_err = result.is_err();
|
let is_err = result.is_err();
|
||||||
display_err(result, "");
|
display_err(result, "");
|
||||||
if is_err {
|
if is_err {
|
||||||
|
|
|
@ -1,14 +1,13 @@
|
||||||
use std::collections::{BTreeMap, BTreeSet};
|
|
||||||
|
|
||||||
use relative_path::RelativePathBuf;
|
|
||||||
use semver::Version;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
manifest::{overrides::OverrideKey, target::Target},
|
manifest::{overrides::OverrideKey, target::Target},
|
||||||
names::PackageName,
|
names::PackageName,
|
||||||
source::specifiers::DependencySpecifiers,
|
source::specifiers::DependencySpecifiers,
|
||||||
};
|
};
|
||||||
|
use relative_path::RelativePathBuf;
|
||||||
|
use semver::Version;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::collections::{BTreeMap, HashMap};
|
||||||
|
use tracing::instrument;
|
||||||
|
|
||||||
/// Overrides
|
/// Overrides
|
||||||
pub mod overrides;
|
pub mod overrides;
|
||||||
|
@ -45,7 +44,7 @@ pub struct Manifest {
|
||||||
/// The indices to use for the package
|
/// The indices to use for the package
|
||||||
#[serde(
|
#[serde(
|
||||||
default,
|
default,
|
||||||
serialize_with = "crate::util::serialize_gix_url_map",
|
skip_serializing,
|
||||||
deserialize_with = "crate::util::deserialize_gix_url_map"
|
deserialize_with = "crate::util::deserialize_gix_url_map"
|
||||||
)]
|
)]
|
||||||
pub indices: BTreeMap<String, gix::Url>,
|
pub indices: BTreeMap<String, gix::Url>,
|
||||||
|
@ -53,8 +52,7 @@ pub struct Manifest {
|
||||||
#[cfg(feature = "wally-compat")]
|
#[cfg(feature = "wally-compat")]
|
||||||
#[serde(
|
#[serde(
|
||||||
default,
|
default,
|
||||||
skip_serializing_if = "BTreeMap::is_empty",
|
skip_serializing,
|
||||||
serialize_with = "crate::util::serialize_gix_url_map",
|
|
||||||
deserialize_with = "crate::util::deserialize_gix_url_map"
|
deserialize_with = "crate::util::deserialize_gix_url_map"
|
||||||
)]
|
)]
|
||||||
pub wally_indices: BTreeMap<String, gix::Url>,
|
pub wally_indices: BTreeMap<String, gix::Url>,
|
||||||
|
@ -63,7 +61,7 @@ pub struct Manifest {
|
||||||
pub overrides: BTreeMap<OverrideKey, DependencySpecifiers>,
|
pub overrides: BTreeMap<OverrideKey, DependencySpecifiers>,
|
||||||
/// The files to include in the package
|
/// The files to include in the package
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub includes: BTreeSet<String>,
|
pub includes: Vec<String>,
|
||||||
/// The patches to apply to packages
|
/// The patches to apply to packages
|
||||||
#[cfg(feature = "patches")]
|
#[cfg(feature = "patches")]
|
||||||
#[serde(default, skip_serializing)]
|
#[serde(default, skip_serializing)]
|
||||||
|
@ -90,6 +88,9 @@ pub struct Manifest {
|
||||||
/// The dev dependencies of the package
|
/// The dev dependencies of the package
|
||||||
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
||||||
pub dev_dependencies: BTreeMap<String, DependencySpecifiers>,
|
pub dev_dependencies: BTreeMap<String, DependencySpecifiers>,
|
||||||
|
/// The user-defined fields of the package
|
||||||
|
#[serde(flatten)]
|
||||||
|
pub user_defined_fields: HashMap<String, toml::Value>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A dependency type
|
/// A dependency type
|
||||||
|
@ -106,6 +107,7 @@ pub enum DependencyType {
|
||||||
|
|
||||||
impl Manifest {
|
impl Manifest {
|
||||||
/// Get all dependencies from the manifest
|
/// Get all dependencies from the manifest
|
||||||
|
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
|
||||||
pub fn all_dependencies(
|
pub fn all_dependencies(
|
||||||
&self,
|
&self,
|
||||||
) -> Result<
|
) -> Result<
|
||||||
|
|
|
@ -2,7 +2,7 @@ use relative_path::RelativePathBuf;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use serde_with::{DeserializeFromStr, SerializeDisplay};
|
use serde_with::{DeserializeFromStr, SerializeDisplay};
|
||||||
use std::{
|
use std::{
|
||||||
collections::BTreeSet,
|
collections::{BTreeMap, BTreeSet},
|
||||||
fmt::{Display, Formatter},
|
fmt::{Display, Formatter},
|
||||||
str::FromStr,
|
str::FromStr,
|
||||||
};
|
};
|
||||||
|
@ -68,6 +68,11 @@ impl TargetKind {
|
||||||
|
|
||||||
format!("{dependency}_packages")
|
format!("{dependency}_packages")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns whether this target is a Roblox target
|
||||||
|
pub fn is_roblox(&self) -> bool {
|
||||||
|
matches!(self, TargetKind::Roblox | TargetKind::RobloxServer)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A target of a package
|
/// A target of a package
|
||||||
|
@ -77,7 +82,7 @@ pub enum Target {
|
||||||
/// A Roblox target
|
/// A Roblox target
|
||||||
Roblox {
|
Roblox {
|
||||||
/// The path to the lib export file
|
/// The path to the lib export file
|
||||||
#[serde(default)]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
lib: Option<RelativePathBuf>,
|
lib: Option<RelativePathBuf>,
|
||||||
/// The files to include in the sync tool's config
|
/// The files to include in the sync tool's config
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
|
@ -86,7 +91,7 @@ pub enum Target {
|
||||||
/// A Roblox server target
|
/// A Roblox server target
|
||||||
RobloxServer {
|
RobloxServer {
|
||||||
/// The path to the lib export file
|
/// The path to the lib export file
|
||||||
#[serde(default)]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
lib: Option<RelativePathBuf>,
|
lib: Option<RelativePathBuf>,
|
||||||
/// The files to include in the sync tool's config
|
/// The files to include in the sync tool's config
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
|
@ -95,20 +100,26 @@ pub enum Target {
|
||||||
/// A Lune target
|
/// A Lune target
|
||||||
Lune {
|
Lune {
|
||||||
/// The path to the lib export file
|
/// The path to the lib export file
|
||||||
#[serde(default)]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
lib: Option<RelativePathBuf>,
|
lib: Option<RelativePathBuf>,
|
||||||
/// The path to the bin export file
|
/// The path to the bin export file
|
||||||
#[serde(default)]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
bin: Option<RelativePathBuf>,
|
bin: Option<RelativePathBuf>,
|
||||||
|
/// The exported scripts
|
||||||
|
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
||||||
|
scripts: BTreeMap<String, RelativePathBuf>,
|
||||||
},
|
},
|
||||||
/// A Luau target
|
/// A Luau target
|
||||||
Luau {
|
Luau {
|
||||||
/// The path to the lib export file
|
/// The path to the lib export file
|
||||||
#[serde(default)]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
lib: Option<RelativePathBuf>,
|
lib: Option<RelativePathBuf>,
|
||||||
/// The path to the bin export file
|
/// The path to the bin export file
|
||||||
#[serde(default)]
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
bin: Option<RelativePathBuf>,
|
bin: Option<RelativePathBuf>,
|
||||||
|
/// The exported scripts
|
||||||
|
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
||||||
|
scripts: BTreeMap<String, RelativePathBuf>,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -151,6 +162,15 @@ impl Target {
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns the scripts exported by this target
|
||||||
|
pub fn scripts(&self) -> Option<&BTreeMap<String, RelativePathBuf>> {
|
||||||
|
match self {
|
||||||
|
Target::Lune { scripts, .. } => Some(scripts),
|
||||||
|
Target::Luau { scripts, .. } => Some(scripts),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Display for Target {
|
impl Display for Target {
|
||||||
|
|