Mirror of https://github.com/pesde-pkg/pesde.git (synced 2025-04-06 03:40:59 +01:00)
Compare commits: v0.5.0-rc. ... 0.5 (72 commits)
Commits in this range (SHA1):

32906400ec 5c2f831c26 97d9251f69 89a2103164 0c159e7689 4f75af88b7
f009c957ca 3569ff32cd c3e764ddda db3335bbf7 711b0009cb f88b800d51
28df3bcca4 0f74e2efa3 a6c1108d5b 9535175a45 d9d27cf45b 60fb68fcf3
78976834b2 52603ea43e 0dde647042 3196a83b25 d387c27f16 a6846597ca
3810a3b9ff 52c502359b 7d1e20da8c d35f34e8f0 9ee75ec9c9 919b0036e5
7466131f04 0be7dd4d0e f8d0bc6c4d 381740d2ce a7ea8eb9c1 4a3619c26e
16ab05ec72 36e6f16ca6 4843424dba e51bc9f9bb 6d8731f1e5 49a42dc931
13594d6103 eab46e4ee5 7311427518 c94f0e55ec 15af291f84 2b2d280fe0
0fa17a839f e30ec8a6cf f6fce8be9e 4d3ddd50cb 5513ef41a3 ac74c57709
5ba8c5dbb4 7b592bb719 f7d2d7cbb0 91a3a9b122 b53457c42c a4162cd300
e807c261a2 11a356c99a af30701a21 81ecd02df2 70f3bec275 385e36f1e4
f69c05a05a 564d9de675 e5e2bbeeb4 f0aafe212d 9b31718a0e 083bf3badd
80 changed files with 3199 additions and 2000 deletions
.github/workflows/debug.yml (vendored, new file, 79 lines)
```yaml
name: Debug

on:
  push:
  pull_request:

jobs:
  get-version:
    name: Get build version
    runs-on: ubuntu-latest
    outputs:
      version: v${{ steps.get_version.outputs.value }}+rev.g${{ steps.trim_sha.outputs.trimmed_sha }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Get package version
        uses: SebRollen/toml-action@v1.2.0
        id: get_version
        with:
          file: Cargo.toml
          field: package.version

      - name: Trim commit SHA
        id: trim_sha
        run: |
          commit_sha=${{ github.sha }}
          echo "trimmed_sha=${commit_sha:0:7}" | tee $GITHUB_OUTPUT

  build:
    strategy:
      matrix:
        include:
          - job-name: windows-x86_64
            target: x86_64-pc-windows-msvc
            runs-on: windows-latest
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-windows-x86_64

          - job-name: linux-x86_64
            target: x86_64-unknown-linux-gnu
            runs-on: ubuntu-latest
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-linux-x86_64

          - job-name: macos-x86_64
            target: x86_64-apple-darwin
            runs-on: macos-13
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-macos-x86_64

          - job-name: macos-aarch64
            target: aarch64-apple-darwin
            runs-on: macos-latest
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-macos-aarch64

    name: Build for ${{ matrix.job-name }}
    runs-on: ${{ matrix.runs-on }}
    needs: get-version
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Linux build dependencies
        if: ${{ matrix.runs-on == 'ubuntu-latest' }}
        run: |
          sudo apt-get update
          sudo apt-get install libdbus-1-dev pkg-config

      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable

      - name: Compile in debug mode
        run: cargo build --bins --no-default-features --features bin,patches,wally-compat --target ${{ matrix.target }} --locked

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.artifact-name }}
          if-no-files-found: error
          path: |
            target/${{ matrix.target }}/debug/pesde.exe
            target/${{ matrix.target }}/debug/pesde
```
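The `get-version` job stitches the crate version from `Cargo.toml` together with a 7-character commit SHA. A small sketch of what the `Trim commit SHA` step yields (the SHA is a hypothetical stand-in for `${{ github.sha }}`):

```sh
commit_sha=1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b   # hypothetical github.sha
echo "trimmed_sha=${commit_sha:0:7}"                  # prints: trimmed_sha=1a2b3c4
# with package.version = "0.5.3", the job's `version` output becomes:
#   v0.5.3+rev.g1a2b3c4
```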
.github/workflows/release.yaml (vendored, 50 lines changed)
```diff
@@ -4,8 +4,44 @@ on:
     tags:
       - v*
 
 env:
+  CRATE_NAME: pesde
   BIN_NAME: pesde
 
 jobs:
+  prepare:
+    name: Prepare
+    runs-on: ubuntu-latest
+    outputs:
+      version: ${{ steps.extract_version.outputs.VERSION }}
+      found: ${{ steps.ensure_not_published.outputs.FOUND }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: Extract version
+        id: extract_version
+        shell: bash
+        run: |
+          VERSION=$(echo ${{ github.ref_name }} | cut -d'+' -f1 | cut -c 2-)
+          echo "VERSION=$VERSION" >> "$GITHUB_OUTPUT"
+
+      - name: Ensure not published
+        id: ensure_not_published
+        shell: bash
+        env:
+          VERSION: ${{ steps.extract_version.outputs.VERSION }}
+        run: |
+          CRATE_NAME="${{ env.CRATE_NAME }}"
+          if [ ${#CRATE_NAME} -eq 1 ]; then
+            DIR="1"
+          elif [ ${#CRATE_NAME} -eq 2 ]; then
+            DIR="2"
+          elif [ ${#CRATE_NAME} -eq 3 ]; then
+            DIR="3/${CRATE_NAME:0:1}"
+          else
+            DIR="${CRATE_NAME:0:2}/${CRATE_NAME:2:2}"
+          fi
+
+          FOUND=$(curl -sSL --fail-with-body "https://index.crates.io/$DIR/${{ env.CRATE_NAME }}" | jq -s 'any(.[]; .vers == "${{ env.VERSION }}")')
+          echo "FOUND=$FOUND" >> "$GITHUB_OUTPUT"
+
   build:
     strategy:
       matrix:
@@ -31,13 +67,17 @@ jobs:
           target: aarch64-apple-darwin
     runs-on: ${{ matrix.os }}
     name: Build for ${{ matrix.host }}-${{ matrix.arch }}
+    needs: [ prepare ]
+    if: ${{ needs.prepare.outputs.found == 'false' }}
+    env:
+      VERSION: ${{ needs.prepare.outputs.version }}
     steps:
       - uses: actions/checkout@v4
       - uses: dtolnay/rust-toolchain@stable
       - name: Set env
         shell: bash
         run: |
-          ARCHIVE_NAME=${{ env.BIN_NAME }}-$(echo ${{ github.ref_name }} | cut -c 2-)-${{ matrix.host }}-${{ matrix.arch }}
+          ARCHIVE_NAME=${{ env.BIN_NAME }}-${{ env.VERSION }}-${{ matrix.host }}-${{ matrix.arch }}
 
           echo "ARCHIVE_NAME=$ARCHIVE_NAME" >> $GITHUB_ENV
@@ -91,7 +131,9 @@ jobs:
     permissions:
       contents: write
       pull-requests: read
-    needs: [ build, publish ]
+    needs: [ prepare, publish ]
+    env:
+      VERSION: ${{ needs.prepare.outputs.version }}
     steps:
      - uses: actions/checkout@v4
        with:
@@ -107,7 +149,7 @@ jobs:
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          tag_name: ${{ github.ref_name }}
-         name: ${{ github.ref_name }}
+         name: v${{ env.VERSION }}
          draft: true
-         prerelease: ${{ startsWith(github.ref_name, 'v0') }}
+         prerelease: ${{ startsWith(env.VERSION, '0') }}
          files: artifacts/*
```
CHANGELOG.md (200 lines changed)
The 0.5.0-rc.* pre-release entries and their link references are replaced with consolidated entries for the 0.5.0 release and the 0.5.1 through 0.5.3 patches. The changelog after this change reads:

```markdown
All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.5.3] - 2024-12-30
### Added
- Add meta field in index files to preserve compatibility with potential future changes by @daimond113

### Changed
- Remove verbosity from release mode logging by @daimond113

## [0.5.2] - 2024-12-19
### Fixed
- Change dependency types for removed peer dependencies by @daimond113
- Resolve version to correct tag for `pesde_version` field by @daimond113
- Do not error on missing dependencies until full linking by @daimond113

### Changed
- Switch from `log` to `tracing` for logging by @daimond113

## [0.5.1] - 2024-12-15
### Fixed
- Ignore build metadata when comparing CLI versions by @daimond113

## [0.5.0] - 2024-12-14
### Added
- Add support for multiple targets under the same package name in workspace members by @daimond113
- Add `yes` argument to skip all prompts in publish command by @daimond113
- Publish all workspace members when publishing a workspace by @daimond113
- Inform user about not finding any bin package when using its bin invocation by @daimond113
- Support full version requirements in workspace version field by @daimond113
- Improved authentication system for registry changes by @daimond113
- New website by @lukadev-0
- Add `--index` flag to `publish` command to publish to a specific index by @daimond113
- Support fallback Wally registries by @daimond113
- Print that no updates are available in `outdated` command by @daimond113
- Support negated globs in `workspace_members` field by @daimond113
- Make `includes` use glob patterns by @daimond113
- Use symlinks for workspace dependencies to not require reinstalling by @daimond113
- Add `auth token` command to print the auth token for the index by @daimond113
- Support specifying which external registries are allowed on registries by @daimond113
- Add improved CLI styling by @daimond113
- Install pesde dependencies before Wally to support scripts packages by @daimond113
- Support packages exporting scripts by @daimond113
- Support using workspace root as a member by @daimond113
- Allow multiple, user selectable scripts packages to be selected (& custom packages inputted) in `init` command by @daimond113
- Support granular control over which repositories are allowed in various specifier types by @daimond113
- Display included scripts in `publish` command by @daimond113

### Fixed
- Fix versions with dots not being handled correctly by @daimond113
- Use workspace specifiers' `target` field when resolving by @daimond113
- Add feature gates to `wally-compat` specific code in init command by @daimond113
- Remove duplicated manifest file name in `publish` command by @daimond113
- Allow use of Luau packages in `execute` command by @daimond113
- Fix `self-upgrade` overwriting its own binary by @daimond113
- Correct `pesde.toml` inclusion message in `publish` command by @daimond113
- Allow writes to files when `link` is false in PackageFS::write_to by @daimond113
- Handle missing revisions in AnyPackageIdentifier::from_str by @daimond113
- Make GitHub OAuth client ID config optional by @daimond113
- Use updated aliases when reusing lockfile dependencies by @daimond113
- Listen for device flow completion without requiring pressing enter by @daimond113
- Sync scripts repo in background by @daimond113
- Don't make CAS files read-only on Windows (file removal is disallowed if the file is read-only) by @daimond113
- Validate package names are lowercase by @daimond113
- Use a different algorithm for finding a CAS directory to avoid issues with mounted drives by @daimond113
- Remove default.project.json from Git pesde dependencies by @daimond113
- Correctly (de)serialize workspace specifiers by @daimond113
- Fix CAS finder algorithm issues with Windows by @daimond113
- Fix CAS finder algorithm's AlreadyExists error by @daimond113
- Use moved path when setting file to read-only by @daimond113
- Correctly link Wally server packages by @daimond113
- Fix `self-install` doing a cross-device move by @daimond113
- Add back mistakenly removed updates check caching by @daimond113
- Set download error source to inner error to propagate the error by @daimond113
- Correctly copy workspace packages by @daimond113
- Fix peer dependencies being resolved incorrectly by @daimond113
- Set PESDE_ROOT to the correct path in `pesde run` by @daimond113
- Install dependencies of packages in `x` command by @daimond113
- Fix `includes` not supporting root files by @daimond113
- Link dependencies before type extraction to support more use cases by @daimond113
- Strip `.luau` extension from linker modules' require paths to comply with Luau by @daimond113
- Correctly handle graph paths for resolving overridden packages by @daimond113
- Do not require `--` in bin package executables on Unix by @daimond113
- Do not require lib or bin exports if package exports scripts by @daimond113
- Correctly resolve URLs in `publish` command by @daimond113
- Add Roblox types in linker modules even with no config generator script by @daimond113

### Removed
- Remove special scripts repo handling to favour standard packages by @daimond113

### Changed
- Rewrite the entire project in a more maintainable way by @daimond113
- Support workspaces by @daimond113
- Support multiple targets for a single package by @daimond113
- Make registry much easier to self-host by @daimond113
- Start maintaining a changelog by @daimond113
- Optimize boolean expression in `publish` command by @daimond113
- Switched to fs-err for better errors with file system operations by @daimond113
- Use body bytes over multipart for publishing packages by @daimond113
- `self-upgrade` now will check for updates by itself by default by @daimond113
- Only store `pesde_version` executables in the version cache by @daimond113
- Remove lower bound limit of 3 characters for pesde package names by @daimond113

### Performance
- Clone dependency repos shallowly by @daimond113
- Switch to async Rust by @daimond113
- Asyncify dependency linking by @daimond113
- Use `exec` in Unix bin linking to reduce the number of processes by @daimond113

[0.5.3]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
[0.5.2]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
[0.5.1]: https://github.com/daimond113/pesde/compare/v0.5.0%2Bregistry.0.1.0..v0.5.1%2Bregistry.0.1.0
[0.5.0]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0%2Bregistry.0.1.0
```
Cargo.lock (generated, 1001 lines changed): file diff suppressed because it is too large.
Cargo.toml (70 lines changed)
```diff
@@ -1,6 +1,6 @@
 [package]
 name = "pesde"
-version = "0.5.0-rc.13"
+version = "0.5.3"
 edition = "2021"
 license = "MIT"
 authors = ["daimond113 <contact@daimond113.com>"]
@@ -11,28 +11,28 @@ include = ["src/**/*", "Cargo.toml", "Cargo.lock", "README.md", "LICENSE", "CHAN
 
 [features]
 bin = [
-    "clap",
-    "dirs",
-    "pretty_env_logger",
+    "dep:clap",
+    "dep:dirs",
+    "dep:tracing-subscriber",
     "reqwest/json",
-    "indicatif",
-    "indicatif-log-bridge",
-    "inquire",
-    "toml_edit",
-    "colored",
-    "anyhow",
-    "keyring",
-    "open",
+    "dep:indicatif",
+    "dep:tracing-indicatif",
+    "dep:inquire",
+    "dep:toml_edit",
+    "dep:colored",
+    "dep:anyhow",
+    "dep:keyring",
+    "dep:open",
     "gix/worktree-mutation",
-    "serde_json",
-    "winreg",
+    "dep:serde_json",
+    "dep:winreg",
     "fs-err/expose_original_error",
     "tokio/rt",
     "tokio/rt-multi-thread",
     "tokio/macros",
 ]
-wally-compat = ["async_zip", "serde_json"]
-patches = ["git2"]
+wally-compat = ["dep:async_zip", "dep:serde_json"]
+patches = ["dep:git2"]
 version-management = ["bin"]
 
 [[bin]]
@@ -44,25 +44,25 @@ required-features = ["bin"]
 uninlined_format_args = "warn"
 
 [dependencies]
-serde = { version = "1.0.214", features = ["derive"] }
+serde = { version = "1.0.216", features = ["derive"] }
 toml = "0.8.19"
 serde_with = "3.11.0"
-gix = { version = "0.67.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
-semver = { version = "1.0.23", features = ["serde"] }
+gix = { version = "0.68.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
+semver = { version = "1.0.24", features = ["serde"] }
 reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls"] }
 tokio-tar = "0.3.1"
-async-compression = { version = "0.4.17", features = ["tokio", "gzip"] }
-pathdiff = "0.2.2"
+async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
+pathdiff = "0.2.3"
 relative-path = { version = "1.9.3", features = ["serde"] }
-log = "0.4.22"
-thiserror = "2.0.2"
-tokio = "1.41.0"
-tokio-util = "0.7.12"
+tracing = { version = "0.1.41", features = ["attributes"] }
+thiserror = "2.0.7"
+tokio = { version = "1.42.0", features = ["process"] }
+tokio-util = "0.7.13"
 async-stream = "0.3.6"
 futures = "0.3.31"
-full_moon = { version = "1.1.0", features = ["luau"] }
-url = { version = "2.5.3", features = ["serde"] }
-chrono = { version = "0.4.38", features = ["serde"] }
+full_moon = { version = "1.1.2", features = ["luau"] }
+url = { version = "2.5.4", features = ["serde"] }
+chrono = { version = "0.4.39", features = ["serde"] }
 sha2 = "0.10.8"
 tempfile = "3.14.0"
 wax = { version = "0.6.0", default-features = false }
@@ -72,18 +72,18 @@ fs-err = { version = "3.0.0", features = ["tokio"] }
 git2 = { version = "0.19.0", optional = true }
 
 async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"], optional = true }
-serde_json = { version = "1.0.132", optional = true }
+serde_json = { version = "1.0.133", optional = true }
 
-anyhow = { version = "1.0.93", optional = true }
-open = { version = "5.3.0", optional = true }
+anyhow = { version = "1.0.94", optional = true }
+open = { version = "5.3.1", optional = true }
 keyring = { version = "3.6.1", features = ["crypto-rust", "windows-native", "apple-native", "async-secret-service", "async-io"], optional = true }
 colored = { version = "2.1.0", optional = true }
 toml_edit = { version = "0.22.22", optional = true }
-clap = { version = "4.5.20", features = ["derive"], optional = true }
+clap = { version = "4.5.23", features = ["derive"], optional = true }
 dirs = { version = "5.0.1", optional = true }
-pretty_env_logger = { version = "0.5.0", optional = true }
-indicatif = { version = "0.17.8", optional = true }
-indicatif-log-bridge = { version = "0.2.3", optional = true }
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true }
+indicatif = { version = "0.17.9", optional = true }
+tracing-indicatif = { version = "0.3.8", optional = true }
 inquire = { version = "0.7.5", optional = true }
 
 [target.'cfg(target_os = "windows")'.dependencies]
```
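The feature-list changes above adopt Cargo's `dep:` syntax for optional dependencies: with the plain name, Cargo also creates an implicit feature named after the dependency; with `dep:`, only the dependency itself is activated. A minimal sketch of the difference (not taken from this manifest):

```toml
[dependencies]
clap = { version = "4", optional = true }

[features]
# bin = ["clap"] would additionally expose a public `clap` feature;
# "dep:clap" enables the dependency without creating that implicit feature
bin = ["dep:clap"]
```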
SECURITY.md (new file, 25 lines)
```markdown
# Security Policy

## Supported Versions

As pesde is currently in version 0.x, we can only guarantee security for:

- **The latest minor** (currently 0.5).
- **The latest release candidate for the next version**, if available.

When a new minor version is released, the previous version will immediately lose security support.

> **Note:** This policy will change with the release of version 1.0, which will include an extended support period for versions >=1.0.

| Version | Supported          |
| ------- | ------------------ |
| 0.5.x   | :white_check_mark: |
| < 0.5   | :x:                |

## Reporting a Vulnerability

We encourage all security concerns to be reported at [pesde@daimond113.com](mailto:pesde@daimond113.com), using the following format:

- **Subject**: The subject must be prefixed with `[SECURITY]` to ensure it is prioritized as a security concern.
- **Content**:
  - **Affected Versions**: Clearly specify which versions are affected by the issue.
  - **Issue Details**: Provide a detailed description of the issue, including reproduction steps and/or a simple example, if applicable.

We will try to respond as soon as possible.
```
```diff
@@ -2,10 +2,9 @@ import starlight from "@astrojs/starlight"
 import tailwind from "@astrojs/tailwind"
 import { defineConfig } from "astro/config"
 
-import vercel from "@astrojs/vercel/serverless"
-
 // https://astro.build/config
 export default defineConfig({
+	site: "https://docs.pesde.daimond113.com",
 	integrations: [
 		starlight({
 			title: "pesde docs",
@@ -105,6 +104,4 @@ export default defineConfig({
 	vite: {
 		envDir: "..",
 	},
-	output: "hybrid",
-	adapter: vercel(),
 })
```
docs/bun.lockb (BIN): binary file not shown.
```diff
@@ -14,7 +14,6 @@
 		"@astrojs/starlight": "^0.28.2",
 		"@astrojs/starlight-tailwind": "^2.0.3",
 		"@astrojs/tailwind": "^5.1.1",
-		"@astrojs/vercel": "^7.8.1",
 		"@fontsource-variable/nunito-sans": "^5.1.0",
 		"@shikijs/rehype": "^1.21.0",
 		"astro": "^4.15.9",
@@ -27,4 +26,4 @@
 		"prettier-plugin-astro": "^0.14.1",
 		"prettier-plugin-tailwindcss": "^0.6.8"
 	}
 }
```
```diff
@@ -3,7 +3,12 @@
 	href="https://pesde.daimond113.com/"
 	class="flex text-[var(--sl-color-text-accent)] hover:opacity-80"
 >
-	<svg viewBox="0 0 56 28" class="h-7" fill="none" xmlns="http://www.w3.org/2000/svg">
+	<svg
+		viewBox="0 0 56 28"
+		class="h-7"
+		fill="none"
+		xmlns="http://www.w3.org/2000/svg"
+	>
 		<title>pesde</title>
 		<path
 			d="M0 28V26.3156H2.25652V12.2361H0.0635639V10.5517H4.44947L4.48125 11.9819L3.78205 12.3315C4.41769 11.6746 5.16986 11.1661 6.03857 10.8059C6.92846 10.4245 7.82895 10.2338 8.74003 10.2338C9.863 10.2338 10.88 10.4775 11.7911 10.9648C12.7234 11.4522 13.4544 12.1726 13.9841 13.126C14.5349 14.0795 14.8104 15.2448 14.8104 16.6221C14.8104 18.0416 14.5138 19.26 13.9205 20.277C13.3272 21.2728 12.5327 22.0356 11.5368 22.5653C10.5622 23.095 9.5028 23.3598 8.35865 23.3598C7.72301 23.3598 7.11916 23.2751 6.54708 23.1056C5.99619 22.9361 5.50887 22.7242 5.08511 22.4699C4.66135 22.1945 4.34353 21.8873 4.13165 21.5483L4.60838 21.4529L4.5766 26.3156H7.02381V28H0ZM7.94549 21.6118C9.19558 21.6118 10.2444 21.2092 11.0919 20.4041C11.9394 19.5778 12.3632 18.3807 12.3632 16.8127C12.3632 15.2872 11.9606 14.1113 11.1555 13.2849C10.3503 12.4586 9.3333 12.0454 8.1044 12.0454C7.72301 12.0454 7.26747 12.1196 6.73777 12.2679C6.20807 12.395 5.67837 12.6069 5.14867 12.9035C4.61898 13.2002 4.17403 13.5922 3.81383 14.0795L4.5766 12.7446L4.60838 20.7219L3.8774 19.7367C4.42828 20.3299 5.06392 20.7961 5.78431 21.1351C6.5047 21.4529 7.2251 21.6118 7.94549 21.6118Z"
@@ -22,7 +27,8 @@
 			fill="currentColor"></path>
 	</svg>
 </a>
-<span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span>
+<span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span
+>
 <a
 	class="font-medium text-[var(--sl-color-gray-2)] no-underline hover:opacity-80 md:text-lg"
 	href="/">docs</a
```
````diff
@@ -38,17 +38,17 @@ Git dependencies are dependencies on packages hosted on a Git repository.
 
 ```toml title="pesde.toml"
 [dependencies]
-acme = { repo = "acme/package", rev = "main" }
+acme = { repo = "acme/package", rev = "aeff6" }
 ```
 
 In this example, we're specifying a dependency on the package contained within
-the `acme/package` GitHub repository at the `main` branch.
+the `acme/package` GitHub repository at the `aeff6` commit.
 
-You can also use a URL to specify the Git repository and a specific commit.
+You can also use a URL to specify the Git repository and a tag for the revision.
 
 ```toml title="pesde.toml"
 [dependencies]
-acme = { repo = "https://git.acme.local/package.git", rev = "aeff6" }
+acme = { repo = "https://git.acme.local/package.git", rev = "v0.1.0" }
 ```
 
 You can also specify a path if the package is not at the root of the repository.
````
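The `path` field composes with the fields shown above; a short sketch with a hypothetical subdirectory (the reference section later in this compare uses the same shape):

```toml title="pesde.toml"
[dependencies]
acme = { repo = "acme/package", rev = "aeff6", path = "packages/acme" }
```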
```diff
@@ -14,28 +14,25 @@ repository contains a list of scripts for different sync tools. If the tool
 you are using is not supported, you can write your own script and submit a PR
 to get it added.
 
-These scripts are automatically cloned into the `~/.pesde/scripts` folder and
-kept up to date when you use pesde.
-
 ## Usage with Rojo
 
 [Rojo](https://rojo.space/) is a popular tool for syncing files into Roblox
 Studio.
 
 Running `pesde init` will prompt you to select a target, select
-`roblox` or `roblox_server` in this case. This will setup the configuration
-needed to use pesde in a project using Rojo.
+`roblox` or `roblox_server` in this case. You will be prompted to pick out a
+scripts package. Select `pesde/scripts_rojo` to get started with Rojo.
 
 ## Usage with other tools
 
-If you are using a different sync tool, you should look for its scripts in the
-pesde-scripts repository. If you cannot find them, you can write your own and
-optionally submit a PR to help others using the same tool as you get started
-quicker.
+If you are using a different sync tool, you should look for its scripts
+package on the registry. If you cannot find it, you can write your own and
+optionally submit a PR to pesde-scripts to help others using the same tool as
+you get started quicker.
 
 Scaffold your project with `pesde init`, select the `roblox` or `roblox_server`
-target, and then replace the `.pesde/roblox_sync_config_generator.luau` script
-with the one you want to use.
+target, and then create a `.pesde/roblox_sync_config_generator.luau` script
+and put its path in the manifest.
 
 ## Authoring packages
```
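The manifest entry that last step asks for would look like the following sketch, with the path pointing at wherever the script was created:

```toml title="pesde.toml"
[scripts]
roblox_sync_config_generator = ".pesde/roblox_sync_config_generator.luau"
```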
docs/src/content/docs/guides/scripts-packages.mdx (new file, 53 lines)
````markdown
---
title: Using Scripts Packages
description: Learn how to use scripts packages.
---

A **scripts package** is a package that contains scripts. The scripts provided
by the package are linked in `.pesde/{alias}/{script_name}.luau` of the project
that uses the package.

## Using a scripts package

Scripts packages can be installed using the `pesde add` and `pesde install`
commands.

This requires a `pesde.toml` file to be present in the current directory, and
will add the scripts package to the `dependencies` section of the file.

```sh
pesde add pesde/scripts_rojo
pesde install
```

This will add the scripts package to your project, and installing will put the
scripts at `.pesde/scripts_rojo/{script_name}.luau`. You can then add the scripts
to your manifest, for example:

```toml title="pesde.toml"
[scripts]
roblox_sync_config_generator = ".pesde/scripts_rojo/roblox_sync_config_generator.luau"
```

## Making a scripts package

To make a scripts package you must use a target compatible with scripts exports.
These currently are `lune` and `luau`.

Here is an example of a scripts package:

```toml title="pesde.toml"
name = "pesde/scripts_rojo"
version = "1.0.0"
license = "MIT"

[target]
environment = "lune"

[target.scripts]
roblox_sync_config_generator = "roblox_sync_config_generator.luau"
```

The `scripts` table in the target is a map of script names to the path of the
script in the package. The scripts will be linked in the project that uses the
package at `.pesde/{alias}/{script_name}.luau`.
````
````diff
@@ -19,10 +19,10 @@ To create an index, create a new repository and add a `config.toml` file with
 the following content:
 
 ```toml title="config.toml"
-# The URL of the registry API
+# the URL of the registry API
 api = "https://registry.acme.local/"
 
-# Package download URL (optional)
+# package download URL (optional)
 download = "{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}"
 
 # the client ID of the GitHub OAuth app (optional)
@@ -33,13 +33,16 @@ git_allowed = true
 
 # whether to allow packages which depend on packages from other registries
 # (default: false)
-other_registries_allowed = true
+other_registries_allowed = ["https://git.acme.local/index"]
 
 # whether to allow packages with Wally dependencies (default: false)
 wally_allowed = false
 
 # the maximum size of the archive in bytes (default: 4MB)
 max_archive_size = 4194304
+
+# the scripts packages present in the `init` command selection by default
+scripts_packages = ["pesde/scripts_rojo"]
 ```
 
 - **api**: The URL of the registry API. See below for more information.
@@ -60,18 +63,24 @@ max_archive_size = 4194304
 - **github_oauth_client_id**: This is required if you use GitHub OAuth for
   authentication. See below for more information.
 
-- **git_allowed**: Whether to allow packages with Git dependencies. This is
-  optional and defaults to `false`.
+- **git_allowed**: Whether to allow packages with Git dependencies. This can be
+  either a bool or a list of allowed repository URLs. This is optional and
+  defaults to `false`.
 
 - **other_registries_allowed**: Whether to allow packages which depend on
-  packages from other registries. This is optional and defaults to `false`.
+  packages from other registries. This can be either a bool or a list of
+  allowed index repository URLs. This is optional and defaults to `false`.
 
-- **wally_allowed**: Whether to allow packages with Wally dependencies. This is
-  optional and defaults to `false`.
+- **wally_allowed**: Whether to allow packages with Wally dependencies. This can
+  be either a bool or a list of allowed index repository URLs. This is
+  optional and defaults to `false`.
 
 - **max_archive_size**: The maximum size of the archive in bytes. This is
   optional and defaults to `4194304` (4MB).
 
+- **scripts_packages**: The scripts packages present in the `init` command
+  selection by default. This is optional and defaults to none.
+
 You should then push this repository to [GitHub](https://github.com/).
 
 ## Configuring the registry
@@ -88,8 +97,8 @@ has access to the index repository. We recommend using a separate account
 for this purpose.
 
 <Aside>
-  For a GitHub account the password **must** be a personal access token. For
-  instructions on how to create a personal access token, see the [GitHub
+  For a GitHub account the password **must** be a personal access token. For instructions on how to
+  create a personal access token, see the [GitHub
   documentation](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens).
   The access token must have read and write access to the index repository.
 </Aside>
````
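Per the updated field descriptions, `git_allowed`, `other_registries_allowed`, and `wally_allowed` all accept either a bool or a list of allowed URLs. A hedged sketch of the list form (the URLs are placeholders, and the exact matching rules are not specified here):

```toml title="config.toml"
# accept Git dependencies and Wally dependencies only from these sources
git_allowed = ["https://git.acme.local/"]
wally_allowed = ["https://git.acme.local/wally-index"]
```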
````diff
@@ -41,6 +41,16 @@ You can follow the installation instructions in the
 pesde should now be installed on your system. You may need to restart your
 computer for the changes to take effect.
 
+<Aside type="caution">
+  pesde uses symlinks which are an administrator-level operation on Windows.
+  To ensure proper functionality, enable [Developer Mode](https://learn.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development).
+
+  If you are getting errors such as `Failed to symlink file, a required
+  privilege is not held by the client`, then enabling this setting will fix
+  them.
+</Aside>
+
 </TabItem>
 <TabItem label="Linux & macOS">
 
@@ -59,7 +69,7 @@ You can follow the installation instructions in the
   environment variable.
 
 ```sh title=".zshrc"
-export PATH = "$PATH:/home/user/.pesde/bin"
+export PATH="$PATH:$HOME/.pesde/bin"
 ```
 
 You should then be able to run `pesde` after restarting your shell.
````
````diff
@@ -27,13 +27,13 @@ environment we're targeting is `luau`.
 ```sh
 pesde init
 
-# What is the name of the project? <username>/hello_pesde
-# What is the description of the project? (leave empty for none)
-# Who are the authors of this project? (leave empty for none, comma separated)
-# What is the repository URL of this project? (leave empty for none)
-# What is the license of this project? (leave empty for none) MIT
-# What environment are you targeting for your package? luau
-# Would you like to setup a default roblox_sync_config_generator script? No
+# what is the name of the project? <username>/hello_pesde
+# what is the description of the project?
+# who are the authors of this project?
+# what is the repository URL of this project?
+# what is the license of this project? MIT
+# what environment are you targeting for your package? luau
+# would you like to setup default Roblox compatibility scripts? No
 ```
 
 The command will create a `pesde.toml` file in the current folder. Go ahead
@@ -111,7 +111,7 @@ pesde install
 ```
 
 You should see that pesde has created a `luau_packages` folder containing the
-newly installed package. It has alsoo created a `pesde.lock` file, this file
+newly installed package. It has also created a `pesde.lock` file, this file
 contains the exact versions of the dependencies that were installed so that
 they can be installed again in the future.
````
````diff
@@ -55,19 +55,6 @@ is printed.
 
 The default index is [`pesde-index`](https://github.com/pesde-pkg/index).
 
-### `pesde config scripts-repo`
-
-```sh
-pesde config scripts-repo [REPO]
-```
-
-Configures the scripts repository. If no repository is provided, the current
-scripts repository is printed.
-
-- `-r, --reset`: Resets the scripts repository.
-
-The default scripts repository is [`pesde-scripts`](https://github.com/pesde-pkg/scripts).
-
 ## `pesde init`
 
 Initializes a new pesde project in the current directory.
````
````diff
@@ -155,6 +155,19 @@ build_files = [
 These files are passed to [`roblox_sync_config_generator`](#roblox_sync_config_generator)
 when the package is installed in order to generate the necessary configuration.
 
+### `scripts`
+
+**Allowed in:** `luau`, `lune`
+
+A list of scripts that will be linked to the dependant's `.pesde` directory, and
+copied over to the [scripts](#scripts-1) section when initialising a project with
+this package as the scripts package.
+
+```toml
+[target.scripts]
+roblox_sync_config_generator = "scripts/roblox_sync_config_generator.luau"
+```
+
 ## `[scripts]`
 
 The `[scripts]` section contains scripts that can be run using the `pesde run`
@@ -177,10 +190,6 @@ sync tools.
 of files specified within the [`target.build_files`](#build_files) of the
 package.
 
-You can find template scripts inside the
-[`pesde-scripts` repository](https://github.com/pesde-pkg/scripts)
-for various sync tools.
-
 <LinkCard
 	title="Roblox"
 	description="Learn more about using pesde in Roblox projects."
@@ -190,7 +199,7 @@ for various sync tools.
 <LinkCard
 	title="Example script for Rojo"
 	description="An example script for generating configuration for Rojo."
-	href="https://github.com/pesde-pkg/scripts/blob/master/lune/rojo/roblox_sync_config_generator.luau"
+	href="https://github.com/pesde-pkg/scripts/blob/master/src/generators/rojo/sync_config.luau"
 />
 
 ### `sourcemap_generator`
@@ -205,7 +214,7 @@ through `process.args`.
 <LinkCard
 	title="Example script for Rojo"
 	description="An example script for generating configuration for Rojo."
-	href="https://github.com/pesde-pkg/scripts/blob/master/lune/rojo/sourcemap_generator.luau"
+	href="https://github.com/pesde-pkg/scripts/blob/master/src/generators/rojo/sourcemap.luau"
 />
 
 ## `[indices]`
@@ -360,14 +369,14 @@ foo = { wally = "acme/foo", version = "1.2.3", index = "acme" }
 
 ```toml
 [dependencies]
-foo = { repo = "acme/packages", rev = "main", path = "foo" }
+foo = { repo = "acme/packages", rev = "aeff6", path = "foo" }
 ```
 
 **Git dependencies** contain the following fields:
 
 - `repo`: The URL of the Git repository.
   This can either be `<owner>/<name>` for a GitHub repository, or a full URL.
-- `rev`: The Git revision to install. This can be a branch, tag, or commit hash.
+- `rev`: The Git revision to install. This can be a tag or commit hash.
 - `path`: The path within the repository to install. If not specified, the root
   of the repository is used.
````
registry/CHANGELOG.md (new file, 22 lines)
```markdown
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.1.2]
### Changed
- Update to pesde lib API changes by @daimond113

## [0.1.1] - 2024-12-19
### Changed
- Switch to tracing for logging by @daimond113

## [0.1.0] - 2024-12-14
### Added
- Rewrite registry for pesde v0.5.0 by @daimond113

[0.1.2]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
[0.1.1]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
[0.1.0]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0%2Bregistry.0.1.0
```
```diff
@@ -1,6 +1,6 @@
 [package]
 name = "pesde-registry"
-version = "0.7.0"
+version = "0.1.2"
 edition = "2021"
 repository = "https://github.com/pesde-pkg/index"
 publish = false
@@ -8,27 +8,26 @@ publish = false
 [dependencies]
 actix-web = "4.9.0"
 actix-cors = "0.7.0"
-actix-governor = "0.7.0"
+actix-governor = "0.8.0"
 dotenvy = "0.15.7"
-thiserror = "2.0.2"
+thiserror = "2.0.7"
 tantivy = "0.22.0"
-semver = "1.0.23"
-chrono = { version = "0.4.38", features = ["serde"] }
-url = "2.5.2"
+semver = "1.0.24"
+chrono = { version = "0.4.39", features = ["serde"] }
 futures = "0.3.31"
-tokio = "1.41.0"
+tokio = "1.42.0"
 tempfile = "3.14.0"
 fs-err = { version = "3.0.0", features = ["tokio"] }
 async-stream = "0.3.6"
 
 git2 = "0.19.0"
-gix = { version = "0.67.0", default-features = false, features = [
+gix = { version = "0.68.0", default-features = false, features = [
 	"blocking-http-transport-reqwest-rust-tls",
 	"credentials",
 ] }
 
-serde = "1.0.214"
-serde_json = "1.0.132"
+serde = "1.0.216"
+serde_json = "1.0.133"
 serde_yaml = "0.9.34"
 toml = "0.8.19"
 convert_case = "0.6.0"
@@ -39,16 +38,13 @@ reqwest = { version = "0.12.9", features = ["json", "rustls-tls"] }
 constant_time_eq = "0.3.1"
 
 tokio-tar = "0.3.1"
-async-compression = { version = "0.4.17", features = ["tokio", "gzip"] }
+async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
 
-log = "0.4.22"
-pretty_env_logger = "0.5.0"
+tracing = { version = "0.1.41", features = ["attributes"] }
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
+tracing-actix-web = "0.7.15"
 
-sentry = "0.34.0"
-sentry-log = "0.34.0"
-sentry-actix = "0.34.0"
+sentry = { version = "0.35.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "tracing"] }
+sentry-actix = "0.35.0"
 
-pesde = { path = "..", features = [
-	"wally-compat",
-	"git2",
-] }
+pesde = { path = "..", features = ["wally-compat"] }
```
@@ -45,7 +45,7 @@ impl AuthImpl for GitHubAuth {
 return Ok(None);
 }
 Err(_) => {
-log::error!(
+tracing::error!(
 "failed to get user: {}",
 response.into_error().await.unwrap_err()
 );

@@ -53,7 +53,7 @@ impl AuthImpl for GitHubAuth {
 }
 },
 Err(e) => {
-log::error!("failed to get user: {e}");
+tracing::error!("failed to get user: {e}");
 return Ok(None);
 }
 };

@@ -61,7 +61,7 @@ impl AuthImpl for GitHubAuth {
 let user_id = match response.json::<UserResponse>().await {
 Ok(resp) => resp.user.id,
 Err(e) => {
-log::error!("failed to get user: {e}");
+tracing::error!("failed to get user: {e}");
 return Ok(None);
 }
 };
@@ -71,7 +71,7 @@ pub async fn get_package_version(

 let (scope, name_part) = name.as_str();

-let entries: IndexFile = {
+let file: IndexFile = {
 let source = app_state.source.lock().await;
 let repo = gix::open(source.path(&app_state.project))?;
 let tree = root_tree(&repo)?;

@@ -84,14 +84,15 @@ pub async fn get_package_version(

 let Some((v_id, entry, targets)) = ({
 let version = match version {
-VersionRequest::Latest => match entries.keys().map(|k| k.version()).max() {
+VersionRequest::Latest => match file.entries.keys().map(|k| k.version()).max() {
 Some(latest) => latest.clone(),
 None => return Ok(HttpResponse::NotFound().finish()),
 },
 VersionRequest::Specific(version) => version,
 };

-let versions = entries
+let versions = file
+.entries
 .iter()
 .filter(|(v_id, _)| *v_id.version() == version);
@@ -19,7 +19,7 @@ pub async fn get_package_versions(

 let (scope, name_part) = name.as_str();

-let versions: IndexFile = {
+let file: IndexFile = {
 let source = app_state.source.lock().await;
 let repo = gix::open(source.path(&app_state.project))?;
 let tree = root_tree(&repo)?;

@@ -32,7 +32,7 @@ pub async fn get_package_versions(

 let mut responses = BTreeMap::new();

-for (v_id, entry) in versions {
+for (v_id, entry) in file.entries {
 let info = responses
 .entry(v_id.version().clone())
 .or_insert_with(|| PackageResponse {
@@ -304,7 +304,7 @@ pub async fn publish_package(
 .filter(|index| match gix::Url::try_from(*index) {
 Ok(url) => config
 .other_registries_allowed
-.is_allowed(source.repo_url().clone(), url),
+.is_allowed_or_same(source.repo_url().clone(), url),
 Err(_) => false,
 })
 .is_none()

@@ -315,16 +315,13 @@ pub async fn publish_package(
 }
 }
 DependencySpecifiers::Wally(specifier) => {
-if !config.wally_allowed {
-return Err(Error::InvalidArchive(
-"wally dependencies are not allowed".into(),
-));
-}

 if specifier
 .index
-.as_ref()
-.filter(|index| index.parse::<url::Url>().is_ok())
+.as_deref()
+.filter(|index| match gix::Url::try_from(*index) {
+Ok(url) => config.wally_allowed.is_allowed(url),
+Err(_) => false,
+})
 .is_none()
 {
 return Err(Error::InvalidArchive(format!(

@@ -332,15 +329,15 @@ pub async fn publish_package(
 )));
 }
 }
-DependencySpecifiers::Git(_) => {
-if !config.git_allowed {
+DependencySpecifiers::Git(specifier) => {
+if !config.git_allowed.is_allowed(specifier.repo.clone()) {
 return Err(Error::InvalidArchive(
 "git dependencies are not allowed".into(),
 ));
 }
 }
 DependencySpecifiers::Workspace(_) => {
-// workspace specifiers are to be transformed into Pesde specifiers by the sender
+// workspace specifiers are to be transformed into pesde specifiers by the sender
 return Err(Error::InvalidArchive(
 "non-transformed workspace dependency".into(),
 ));

@@ -348,8 +345,9 @@ pub async fn publish_package(
 }
 }

-let repo = source.repo_git2(&app_state.project)?;
+let repo = Repository::open_bare(source.path(&app_state.project))?;
 let gix_repo = gix::open(repo.path())?;

 let gix_tree = root_tree(&gix_repo)?;

 let (scope, name) = manifest.name.as_str();

@@ -373,7 +371,7 @@ pub async fn publish_package(
 }
 };

-let mut entries: IndexFile =
+let mut file: IndexFile =
 toml::de::from_str(&read_file(&gix_tree, [scope, name])?.unwrap_or_default())?;

 let new_entry = IndexFileEntry {

@@ -388,11 +386,12 @@ pub async fn publish_package(
 dependencies,
 };

-let this_version = entries
+let this_version = file
+.entries
 .keys()
 .find(|v_id| *v_id.version() == manifest.version);
 if let Some(this_version) = this_version {
-let other_entry = entries.get(this_version).unwrap();
+let other_entry = file.entries.get(this_version).unwrap();

 // description cannot be different - which one to render in the "Recently published" list?
 // the others cannot be different because what to return from the versions endpoint?

@@ -408,7 +407,8 @@ pub async fn publish_package(
 }
 }

-if entries
+if file
+.entries
 .insert(
 VersionId::new(manifest.version.clone(), manifest.target.kind()),
 new_entry.clone(),

@@ -424,7 +424,7 @@ pub async fn publish_package(
 let reference = repo.find_reference(&refspec)?;

 {
-let index_content = toml::to_string(&entries)?;
+let index_content = toml::to_string(&file)?;
 let mut blob_writer = repo.blob_writer(None)?;
 blob_writer.write_all(index_content.as_bytes())?;
 oids.push((name, blob_writer.commit()?));
@@ -68,10 +68,11 @@ pub async fn search_packages(
 .unwrap();
 let (scope, name) = id.as_str();

-let versions: IndexFile =
+let file: IndexFile =
 toml::de::from_str(&read_file(&tree, [scope, name]).unwrap().unwrap()).unwrap();

-let (latest_version, entry) = versions
+let (latest_version, entry) = file
+.entries
 .iter()
 .max_by_key(|(v_id, _)| v_id.version())
 .unwrap();

@@ -79,17 +80,19 @@ pub async fn search_packages(
 PackageResponse {
 name: id.to_string(),
 version: latest_version.version().to_string(),
-targets: versions
+targets: file
+.entries
 .iter()
 .filter(|(v_id, _)| v_id.version() == latest_version.version())
 .map(|(_, entry)| (&entry.target).into())
 .collect(),
 description: entry.description.clone().unwrap_or_default(),
-published_at: versions
+published_at: file
+.entries
 .values()
-.max_by_key(|entry| entry.published_at)
-.unwrap()
-.published_at,
+.map(|entry| entry.published_at)
+.max()
+.unwrap(),
 license: entry.license.clone().unwrap_or_default(),
 authors: entry.authors.clone(),
 repository: entry.repository.clone().map(|url| url.to_string()),
@@ -1,5 +1,4 @@
 use actix_web::{body::BoxBody, HttpResponse, ResponseError};
-use log::error;
 use pesde::source::git_index::errors::{ReadFile, RefreshError, TreeError};
 use serde::Serialize;
 use thiserror::Error;

@@ -67,7 +66,7 @@ impl ResponseError for Error {
 error: format!("archive is invalid: {e}"),
 }),
 e => {
-log::error!("unhandled error: {e:?}");
+tracing::error!("unhandled error: {e:?}");
 HttpResponse::InternalServerError().finish()
 }
 }
@@ -6,19 +6,22 @@ use crate::{
 use actix_cors::Cors;
 use actix_governor::{Governor, GovernorConfigBuilder};
 use actix_web::{
-middleware::{from_fn, Compress, Logger, NormalizePath, TrailingSlash},
+middleware::{from_fn, Compress, NormalizePath, TrailingSlash},
 rt::System,
 web,
 web::PayloadConfig,
 App, HttpServer,
 };
 use fs_err::tokio as fs;
-use log::info;
 use pesde::{
 source::{pesde::PesdePackageSource, traits::PackageSource},
 AuthConfig, Project,
 };
 use std::{env::current_dir, path::PathBuf};
+use tracing::level_filters::LevelFilter;
+use tracing_subscriber::{
+fmt::format::FmtSpan, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter,
+};

 mod auth;
 mod endpoints;

@@ -116,12 +119,12 @@ async fn run() -> std::io::Result<()> {
 let app_data = web::Data::new(AppState {
 storage: {
 let storage = get_storage_from_env();
-info!("storage: {storage}");
+tracing::info!("storage: {storage}");
 storage
 },
 auth: {
 let auth = get_auth_from_env(&config);
-info!("auth: {auth}");
+tracing::info!("auth: {auth}");
 auth
 },
 source: tokio::sync::Mutex::new(source),

@@ -140,14 +143,12 @@ async fn run() -> std::io::Result<()> {
 .finish()
 .unwrap();

-info!("listening on {address}:{port}");

 HttpServer::new(move || {
 App::new()
 .wrap(sentry_actix::Sentry::with_transaction())
 .wrap(NormalizePath::new(TrailingSlash::Trim))
 .wrap(Cors::permissive())
-.wrap(Logger::default())
+.wrap(tracing_actix_web::TracingLogger::default())
 .wrap(Compress::default())
 .app_data(app_data.clone())
 .route(

@@ -200,12 +201,26 @@ async fn run() -> std::io::Result<()> {
 fn main() -> std::io::Result<()> {
 let _ = dotenvy::dotenv();

-let mut log_builder = pretty_env_logger::formatted_builder();
-log_builder.parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"));
+let tracing_env_filter = EnvFilter::builder()
+.with_default_directive(LevelFilter::INFO.into())
+.from_env_lossy()
+.add_directive("reqwest=info".parse().unwrap())
+.add_directive("rustls=info".parse().unwrap())
+.add_directive("tokio_util=info".parse().unwrap())
+.add_directive("goblin=info".parse().unwrap())
+.add_directive("tower=info".parse().unwrap())
+.add_directive("hyper=info".parse().unwrap())
+.add_directive("h2=info".parse().unwrap());

-let logger = sentry_log::SentryLogger::with_dest(log_builder.build());
-log::set_boxed_logger(Box::new(logger)).unwrap();
-log::set_max_level(log::LevelFilter::Info);
+tracing_subscriber::registry()
+.with(tracing_env_filter)
+.with(
+tracing_subscriber::fmt::layer()
+.compact()
+.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE),
+)
+.with(sentry::integrations::tracing::layer())
+.init();

 let guard = sentry::init(sentry::ClientOptions {
 release: sentry::release_name!(),

@@ -218,9 +233,9 @@ fn main() -> std::io::Result<()> {

 if guard.is_enabled() {
 std::env::set_var("RUST_BACKTRACE", "full");
-info!("sentry initialized");
+tracing::info!("sentry initialized");
 } else {
-info!("sentry **NOT** initialized");
+tracing::info!("sentry **NOT** initialized");
 }

 System::new().block_on(run())
@@ -8,6 +8,8 @@ pub struct TargetInfo {
 kind: TargetKind,
 lib: bool,
 bin: bool,
+#[serde(skip_serializing_if = "BTreeSet::is_empty")]
+scripts: BTreeSet<String>,
 }

 impl From<Target> for TargetInfo {

@@ -22,6 +24,10 @@ impl From<&Target> for TargetInfo {
 kind: target.kind(),
 lib: target.lib_path().is_some(),
 bin: target.bin_path().is_some(),
+scripts: target
+.scripts()
+.map(|scripts| scripts.keys().cloned().collect())
+.unwrap_or_default(),
 }
 }
 }
@@ -104,8 +104,8 @@ pub async fn make_search(
 pin!(stream);

 while let Some((pkg_name, mut file)) = stream.next().await {
-let Some((_, latest_entry)) = file.pop_last() else {
-log::warn!("no versions found for {pkg_name}");
+let Some((_, latest_entry)) = file.entries.pop_last() else {
+tracing::error!("no versions found for {pkg_name}");
 continue;
 };
@@ -5,6 +5,7 @@ use keyring::Entry;
 use reqwest::header::AUTHORIZATION;
 use serde::{ser::SerializeMap, Deserialize, Serialize};
 use std::collections::BTreeMap;
+use tracing::instrument;

 #[derive(Debug, Clone)]
 pub struct Tokens(pub BTreeMap<gix::Url, String>);

@@ -37,15 +38,20 @@ impl<'de> Deserialize<'de> for Tokens {
 }
 }

+#[instrument(level = "trace")]
 pub async fn get_tokens() -> anyhow::Result<Tokens> {
 let config = read_config().await?;
 if !config.tokens.0.is_empty() {
+tracing::debug!("using tokens from config");
 return Ok(config.tokens);
 }

 match Entry::new("tokens", env!("CARGO_PKG_NAME")) {
 Ok(entry) => match entry.get_password() {
-Ok(token) => return serde_json::from_str(&token).context("failed to parse tokens"),
+Ok(token) => {
+tracing::debug!("using tokens from keyring");
+return serde_json::from_str(&token).context("failed to parse tokens");
+}
 Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
 Err(e) => return Err(e.into()),
 },

@@ -56,16 +62,22 @@ pub async fn get_tokens() -> anyhow::Result<Tokens> {
 Ok(Tokens(BTreeMap::new()))
 }

+#[instrument(level = "trace")]
 pub async fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
 let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
 let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?;

 match entry.set_password(&json) {
-Ok(()) => return Ok(()),
+Ok(()) => {
+tracing::debug!("tokens saved to keyring");
+return Ok(());
+}
 Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
 Err(e) => return Err(e.into()),
 }

+tracing::debug!("tokens saved to config");

 let mut config = read_config().await?;
 config.tokens = tokens;
 write_config(&config).await.map_err(Into::into)

@@ -86,6 +98,7 @@ struct UserResponse {
 login: String,
 }

+#[instrument(level = "trace")]
 pub async fn get_token_login(
 reqwest: &reqwest::Client,
 access_token: &str,
@@ -2,6 +2,7 @@ use std::{collections::HashSet, str::FromStr};

 use anyhow::Context;
 use clap::Args;
+use colored::Colorize;
 use semver::VersionReq;

 use crate::cli::{config::read_config, AnyPackageIdentifier, VersionedPackageName};

@@ -62,7 +63,7 @@ impl AddCommand {
 .cloned();

 if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
-log::error!("index {index} not found");
+println!("{}: index {index} not found", "error".red().bold());
 return Ok(());
 }

@@ -89,7 +90,7 @@ impl AddCommand {
 .cloned();

 if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
-log::error!("wally index {index} not found");
+println!("{}: wally index {index} not found", "error".red().bold());
 return Ok(());
 }

@@ -145,7 +146,7 @@ impl AddCommand {
 .pop_last()
 .map(|(v_id, _)| v_id)
 else {
-log::error!("no versions found for package {specifier}");
+println!("{}: no versions found for package", "error".red().bold());

 return Ok(());
 };
@@ -1,22 +1,17 @@
 use clap::Subcommand;

 mod default_index;
-mod scripts_repo;

 #[derive(Debug, Subcommand)]
 pub enum ConfigCommands {
 /// Configuration for the default index
 DefaultIndex(default_index::DefaultIndexCommand),
-
-/// Configuration for the scripts repository
-ScriptsRepo(scripts_repo::ScriptsRepoCommand),
 }

 impl ConfigCommands {
 pub async fn run(self) -> anyhow::Result<()> {
 match self {
 ConfigCommands::DefaultIndex(default_index) => default_index.run().await,
-ConfigCommands::ScriptsRepo(scripts_repo) => scripts_repo.run().await,
 }
 }
 }
@@ -1,48 +0,0 @@
-use crate::cli::{
-config::{read_config, write_config, CliConfig},
-home_dir,
-};
-use anyhow::Context;
-use clap::Args;
-use fs_err::tokio as fs;
-
-#[derive(Debug, Args)]
-pub struct ScriptsRepoCommand {
-/// The new repo URL to set as default, don't pass any value to check the current default repo
-#[arg(index = 1, value_parser = crate::cli::parse_gix_url)]
-repo: Option<gix::Url>,
-
-/// Resets the default repo to the default value
-#[arg(short, long, conflicts_with = "repo")]
-reset: bool,
-}
-
-impl ScriptsRepoCommand {
-pub async fn run(self) -> anyhow::Result<()> {
-let mut config = read_config().await?;
-
-let repo = if self.reset {
-Some(CliConfig::default().scripts_repo)
-} else {
-self.repo
-};
-
-match repo {
-Some(repo) => {
-config.scripts_repo = repo.clone();
-write_config(&config).await?;
-
-fs::remove_dir_all(home_dir()?.join("scripts"))
-.await
-.context("failed to remove scripts directory")?;
-
-println!("scripts repo set to: {repo}");
-}
-None => {
-println!("current scripts repo: {}", config.scripts_repo);
-}
-}
-
-Ok(())
-}
-}
@@ -2,10 +2,9 @@ use crate::cli::{config::read_config, progress_bar, VersionedPackageName};
 use anyhow::Context;
 use clap::Args;
 use fs_err::tokio as fs;
-use indicatif::MultiProgress;
 use pesde::{
 linking::generator::generate_bin_linking_module,
-manifest::{target::TargetKind, DependencyType},
+manifest::target::TargetKind,
 names::PackageName,
 source::{
 pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},

@@ -17,6 +16,7 @@ use semver::VersionReq;
 use std::{
 collections::HashSet, env::current_dir, ffi::OsString, io::Write, process::Command, sync::Arc,
 };
+use tokio::sync::Mutex;

 #[derive(Debug, Args)]
 pub struct ExecuteCommand {

@@ -34,12 +34,7 @@ pub struct ExecuteCommand {
 }

 impl ExecuteCommand {
-pub async fn run(
-self,
-project: Project,
-multi: MultiProgress,
-reqwest: reqwest::Client,
-) -> anyhow::Result<()> {
+pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
 let index = match self.index {
 Some(index) => Some(index),
 None => read_config().await.ok().map(|c| c.default_index),

@@ -83,7 +78,7 @@ impl ExecuteCommand {
 );
 };

-log::info!("found package {}@{version}", pkg_ref.name);
+println!("using {}@{version}", pkg_ref.name);

 let tmp_dir = project.cas_dir().join(".tmp");
 fs::create_dir_all(&tmp_dir)

@@ -116,43 +111,32 @@ impl ExecuteCommand {
 .dependency_graph(None, &mut refreshed_sources, true)
 .await
 .context("failed to build dependency graph")?;
+let graph = Arc::new(graph);

 let (rx, downloaded_graph) = project
-.download_graph(&graph, &mut refreshed_sources, &reqwest, true, true)
+.download_and_link(
+&graph,
+&Arc::new(Mutex::new(refreshed_sources)),
+&reqwest,
+true,
+true,
+|_| async { Ok::<_, std::io::Error>(()) },
+)
 .await
 .context("failed to download dependencies")?;

 progress_bar(
 graph.values().map(|versions| versions.len() as u64).sum(),
 rx,
-&multi,
 "📥 ".to_string(),
 "downloading dependencies".to_string(),
 "downloaded dependencies".to_string(),
 )
 .await?;

-let downloaded_graph = Arc::into_inner(downloaded_graph)
-.unwrap()
-.into_inner()
-.unwrap();
-
-project
-.link_dependencies(
-&downloaded_graph
-.into_iter()
-.map(|(n, v)| {
-(
-n,
-v.into_iter()
-.filter(|(_, n)| n.node.resolved_ty != DependencyType::Dev)
-.collect(),
-)
-})
-.collect(),
-)
+downloaded_graph
 .await
-.context("failed to link dependencies")?;
+.context("failed to download & link dependencies")?;

 let mut caller =
 tempfile::NamedTempFile::new_in(tempdir.path()).context("failed to create tempfile")?;
@@ -1,28 +1,39 @@
-use std::{path::Path, str::FromStr};
-
+use crate::cli::config::read_config;
 use anyhow::Context;
 use clap::Args;
 use colored::Colorize;
 use inquire::validator::Validation;

 use pesde::{
-errors::ManifestReadError, names::PackageName, scripts::ScriptName, Project, DEFAULT_INDEX_NAME,
+errors::ManifestReadError,
+manifest::{target::TargetKind, DependencyType},
+names::PackageName,
+source::{
+git_index::GitBasedSource,
+pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
+specifiers::DependencySpecifiers,
+traits::PackageSource,
+},
+Project, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER,
 };
+use semver::VersionReq;
-use crate::cli::{config::read_config, HOME_DIR};
-use fs_err::tokio as fs;
+use std::{collections::HashSet, fmt::Display, str::FromStr};

 #[derive(Debug, Args)]
 pub struct InitCommand {}

-fn script_contents(path: &Path) -> String {
-format!(
-r#"local process = require("@lune/process")
-local home_dir = if process.os == "windows" then process.env.userprofile else process.env.HOME
-
-require(home_dir .. {:?})"#,
-format!("/{HOME_DIR}/scripts/{}", path.display())
-)
-}
+#[derive(Debug)]
+enum PackageNameOrCustom {
+PackageName(PackageName),
+Custom,
+}
+
+impl Display for PackageNameOrCustom {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+match self {
+PackageNameOrCustom::PackageName(n) => write!(f, "{n}"),
+PackageNameOrCustom::Custom => write!(f, "custom"),
+}
+}
+}

@@ -39,7 +50,7 @@ impl InitCommand {
 let mut manifest = toml_edit::DocumentMut::new();

 manifest["name"] = toml_edit::value(
-inquire::Text::new("What is the name of the project?")
+inquire::Text::new("what is the name of the project?")
 .with_validator(|name: &str| {
 Ok(match PackageName::from_str(name) {
 Ok(_) => Validation::Valid,

@@ -51,20 +62,19 @@ impl InitCommand {
 );
 manifest["version"] = toml_edit::value("0.1.0");

-let description =
-inquire::Text::new("What is the description of the project? (leave empty for none)")
+let description = inquire::Text::new("what is the description of the project?")
+.with_help_message("a short description of the project. leave empty for none")
 .prompt()
 .unwrap();

 if !description.is_empty() {
 manifest["description"] = toml_edit::value(description);
 }

-let authors = inquire::Text::new(
-"Who are the authors of this project? (leave empty for none, comma separated)",
-)
-.prompt()
-.unwrap();
+let authors = inquire::Text::new("who are the authors of this project?")
+.with_help_message("comma separated list. leave empty for none")
+.prompt()
+.unwrap();

 let authors = authors
 .split(',')

@@ -76,106 +86,177 @@ impl InitCommand {
 manifest["authors"] = toml_edit::value(authors);
 }

-let repo = inquire::Text::new(
-"What is the repository URL of this project? (leave empty for none)",
-)
+let repo = inquire::Text::new("what is the repository URL of this project?")
 .with_validator(|repo: &str| {
 if repo.is_empty() {
 return Ok(Validation::Valid);
 }

 Ok(match url::Url::parse(repo) {
 Ok(_) => Validation::Valid,
 Err(e) => Validation::Invalid(e.to_string().into()),
 })
 })
+.with_help_message("leave empty for none")
 .prompt()
 .unwrap();
 if !repo.is_empty() {
 manifest["repository"] = toml_edit::value(repo);
 }

-let license =
-inquire::Text::new("What is the license of this project? (leave empty for none)")
+let license = inquire::Text::new("what is the license of this project?")
 .with_initial_value("MIT")
+.with_help_message("an SPDX license identifier. leave empty for none")
 .prompt()
 .unwrap();
 if !license.is_empty() {
 manifest["license"] = toml_edit::value(license);
 }

 let target_env = inquire::Select::new(
-"What environment are you targeting for your package?",
-vec!["roblox", "roblox_server", "lune", "luau"],
+"what environment are you targeting for your package?",
+TargetKind::VARIANTS.to_vec(),
 )
 .prompt()
 .unwrap();

 manifest["target"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
-["environment"] = toml_edit::value(target_env);
+["environment"] = toml_edit::value(target_env.to_string());

-if target_env == "roblox"
-|| target_env == "roblox_server"
-|| inquire::Confirm::new(&format!(
-"Would you like to setup a default {} script?",
-ScriptName::RobloxSyncConfigGenerator
-))
-.prompt()
-.unwrap()
-{
-let folder = project
-.package_dir()
-.join(concat!(".", env!("CARGO_PKG_NAME")));
-fs::create_dir_all(&folder)
-.await
-.context("failed to create scripts folder")?;
-
-fs::write(
-folder.join(format!("{}.luau", ScriptName::RobloxSyncConfigGenerator)),
-script_contents(Path::new(&format!(
-"lune/rojo/{}.luau",
-ScriptName::RobloxSyncConfigGenerator
-))),
-)
-.await
-.context("failed to write sync config generator script file")?;
-
-#[cfg(feature = "wally-compat")]
-fs::write(
-folder.join(format!("{}.luau", ScriptName::SourcemapGenerator)),
-script_contents(Path::new(&format!(
-"lune/rojo/{}.luau",
-ScriptName::SourcemapGenerator
-))),
-)
-.await
-.context("failed to write sourcemap generator script file")?;
-
-let scripts =
-manifest["scripts"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
-
-scripts[&ScriptName::RobloxSyncConfigGenerator.to_string()] =
-toml_edit::value(format!(
-concat!(".", env!("CARGO_PKG_NAME"), "/{}.luau"),
-ScriptName::RobloxSyncConfigGenerator
-));
-
-#[cfg(feature = "wally-compat")]
-{
-scripts[&ScriptName::SourcemapGenerator.to_string()] = toml_edit::value(format!(
-concat!(".", env!("CARGO_PKG_NAME"), "/{}.luau"),
-ScriptName::SourcemapGenerator
-));
+let source = PesdePackageSource::new(read_config().await?.default_index);
+
+manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
+[DEFAULT_INDEX_NAME] = toml_edit::value(source.repo_url().to_bstring().to_string());
+
+if target_env.is_roblox()
+|| inquire::prompt_confirmation(
+"would you like to setup default Roblox compatibility scripts?",
+)
+.unwrap()
+{
+PackageSource::refresh(&source, &project)
+.await
+.context("failed to refresh package source")?;
+let config = source
+.config(&project)
+.await
+.context("failed to get source config")?;
+
+let scripts_package = if config.scripts_packages.is_empty() {
+PackageNameOrCustom::Custom
+} else {
+inquire::Select::new(
+"which scripts package do you want to use?",
+config
+.scripts_packages
+.into_iter()
+.map(PackageNameOrCustom::PackageName)
+.chain(std::iter::once(PackageNameOrCustom::Custom))
+.collect(),
+)
+.prompt()
+.unwrap()
+};
+
+let scripts_package = match scripts_package {
+PackageNameOrCustom::PackageName(p) => Some(p),
+PackageNameOrCustom::Custom => {
+let name = inquire::Text::new("which scripts package to use?")
+.with_validator(|name: &str| {
+if name.is_empty() {
+return Ok(Validation::Valid);
+}
+
+Ok(match PackageName::from_str(name) {
+Ok(_) => Validation::Valid,
+Err(e) => Validation::Invalid(e.to_string().into()),
+})
+})
+.with_help_message("leave empty for none")
+.prompt()
+.unwrap();
+
+if name.is_empty() {
+None
+} else {
+Some(PackageName::from_str(&name).unwrap())
+}
+}
+};
+
+if let Some(scripts_pkg_name) = scripts_package {
+let (v_id, pkg_ref) = source
+.resolve(
+&PesdeDependencySpecifier {
+name: scripts_pkg_name,
+version: VersionReq::STAR,
+index: None,
+target: None,
+},
+&project,
+TargetKind::Lune,
+&mut HashSet::new(),
+)
+.await
+.context("failed to resolve scripts package")?
+.1
+.pop_last()
+.context("scripts package not found")?;
+
+let Some(scripts) = pkg_ref.target.scripts().filter(|s| !s.is_empty()) else {
+anyhow::bail!("scripts package has no scripts. this is an issue with the index")
+};
+
+let scripts_field = &mut manifest["scripts"]
+.or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
+
+for script_name in scripts.keys() {
+scripts_field[script_name] = toml_edit::value(format!(
+"{SCRIPTS_LINK_FOLDER}/scripts/{script_name}.luau"
+));
+}
+
+let dev_deps = &mut manifest["dev_dependencies"]
+.or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
+
+let field = &mut dev_deps["scripts"];
+field["name"] = toml_edit::value(pkg_ref.name.to_string());
+field["version"] = toml_edit::value(format!("^{}", v_id.version()));
+field["target"] = toml_edit::value(v_id.target().to_string());
+
+for (alias, (spec, ty)) in pkg_ref.dependencies {
+if ty != DependencyType::Peer {
+continue;
+}
+
+let DependencySpecifiers::Pesde(spec) = spec else {
+continue;
+};
+
+let field = &mut dev_deps[alias];
+field["name"] = toml_edit::value(spec.name.to_string());
+field["version"] = toml_edit::value(spec.version.to_string());
+field["target"] =
+toml_edit::value(spec.target.unwrap_or_else(|| *v_id.target()).to_string());
+}
+} else {
+println!(
+"{}",
+"no scripts package configured, this can cause issues with Roblox compatibility".red()
+);
+if !inquire::prompt_confirmation("initialize regardless?").unwrap() {
+return Ok(());
+}
 }
 }

-manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
-[DEFAULT_INDEX_NAME] =
-toml_edit::value(read_config().await?.default_index.to_bstring().to_string());

 project.write_manifest(manifest.to_string()).await?;

-println!("{}", "initialized project".green());
+println!(
+"{}\n{}: run `install` to fully finish setup",
+"initialized project".green(),
+"tip".cyan().bold()
+);
 Ok(())
 }
 }
@ -1,22 +1,20 @@
|
||||||
use crate::cli::{
|
use crate::cli::{
|
||||||
bin_dir, files::make_executable, progress_bar, repos::update_scripts, run_on_workspace_members,
|
bin_dir, files::make_executable, progress_bar, run_on_workspace_members, up_to_date_lockfile,
|
||||||
up_to_date_lockfile,
|
|
||||||
};
|
};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::{ColoredString, Colorize};
|
use colored::{ColoredString, Colorize};
|
||||||
use fs_err::tokio as fs;
|
use fs_err::tokio as fs;
|
||||||
use futures::future::try_join_all;
|
use futures::future::try_join_all;
|
||||||
use indicatif::MultiProgress;
|
|
||||||
use pesde::{
|
use pesde::{
|
||||||
lockfile::Lockfile,
|
download_and_link::filter_graph, lockfile::Lockfile, manifest::target::TargetKind, Project,
|
||||||
manifest::{target::TargetKind, DependencyType},
|
MANIFEST_FILE_NAME,
|
||||||
Project, MANIFEST_FILE_NAME,
|
|
||||||
};
|
};
|
||||||
use std::{
|
use std::{
|
||||||
collections::{BTreeSet, HashMap, HashSet},
|
collections::{BTreeSet, HashMap, HashSet},
|
||||||
sync::Arc,
|
sync::Arc,
|
||||||
};
|
};
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
#[derive(Debug, Args, Copy, Clone)]
|
#[derive(Debug, Args, Copy, Clone)]
|
||||||
pub struct InstallCommand {
|
pub struct InstallCommand {
|
||||||
|
@ -46,12 +44,8 @@ fn bin_link_file(alias: &str) -> String {
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
|
|
||||||
#[cfg(not(unix))]
|
|
||||||
let prefix = String::new();
|
|
||||||
#[cfg(unix)]
|
|
||||||
let prefix = "#!/usr/bin/env -S lune run\n";
|
|
||||||
format!(
|
format!(
|
||||||
r#"{prefix}local process = require("@lune/process")
|
r#"local process = require("@lune/process")
|
||||||
local fs = require("@lune/fs")
|
local fs = require("@lune/fs")
|
||||||
local stdio = require("@lune/stdio")
|
local stdio = require("@lune/stdio")
|
||||||
|
|
||||||
|
@ -81,21 +75,20 @@ stdio.ewrite(stdio.color("red") .. "binary `{alias}` not found. are you in the r
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "patches")]
|
#[cfg(feature = "patches")]
|
||||||
const JOBS: u8 = 6;
|
|
||||||
#[cfg(not(feature = "patches"))]
|
|
||||||
const JOBS: u8 = 5;
|
const JOBS: u8 = 5;
|
||||||
|
#[cfg(not(feature = "patches"))]
|
||||||
|
const JOBS: u8 = 4;
|
||||||
|
|
||||||
fn job(n: u8) -> ColoredString {
|
fn job(n: u8) -> ColoredString {
|
||||||
format!("[{n}/{JOBS}]").dimmed().bold()
|
format!("[{n}/{JOBS}]").dimmed().bold()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, thiserror::Error)]
|
||||||
|
#[error(transparent)]
|
||||||
|
struct CallbackError(#[from] anyhow::Error);
|
||||||
|
|
||||||
impl InstallCommand {
|
impl InstallCommand {
|
||||||
pub async fn run(
|
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||||
self,
|
|
||||||
project: Project,
|
|
||||||
multi: MultiProgress,
|
|
||||||
reqwest: reqwest::Client,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
let mut refreshed_sources = HashSet::new();
|
let mut refreshed_sources = HashSet::new();
|
||||||
|
|
||||||
let manifest = project
|
let manifest = project
|
||||||
|
@ -117,10 +110,10 @@ impl InstallCommand {
|
||||||
match project.deser_lockfile().await {
|
match project.deser_lockfile().await {
|
||||||
Ok(lockfile) => {
|
Ok(lockfile) => {
|
||||||
if lockfile.overrides != manifest.overrides {
|
if lockfile.overrides != manifest.overrides {
|
||||||
log::debug!("overrides are different");
|
tracing::debug!("overrides are different");
|
||||||
None
|
None
|
||||||
} else if lockfile.target != manifest.target.kind() {
|
} else if lockfile.target != manifest.target.kind() {
|
||||||
log::debug!("target kind is different");
|
tracing::debug!("target kind is different");
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
Some(lockfile)
|
Some(lockfile)
|
||||||
|
@ -135,9 +128,6 @@ impl InstallCommand {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let project_2 = project.clone();
|
|
||||||
let update_scripts_handle = tokio::spawn(async move { update_scripts(&project_2).await });
|
|
||||||
|
|
||||||
println!(
|
println!(
|
||||||
"\n{}\n",
|
"\n{}\n",
|
||||||
format!("[now installing {} {}]", manifest.name, manifest.target)
|
format!("[now installing {} {}]", manifest.name, manifest.target)
|
||||||
|
@ -157,7 +147,7 @@ impl InstallCommand {
|
||||||
deleted_folders
|
deleted_folders
|
||||||
.entry(folder.to_string())
|
.entry(folder.to_string())
|
||||||
.or_insert_with(|| async move {
|
.or_insert_with(|| async move {
|
||||||
log::debug!("deleting the {folder} folder");
|
tracing::debug!("deleting the {folder} folder");
|
||||||
|
|
||||||
if let Some(e) = fs::remove_dir_all(package_dir.join(&folder))
|
if let Some(e) = fs::remove_dir_all(package_dir.join(&folder))
|
||||||
.await
|
.await
|
||||||
|
@ -198,122 +188,121 @@ impl InstallCommand {
|
||||||
.dependency_graph(old_graph.as_ref(), &mut refreshed_sources, false)
|
.dependency_graph(old_graph.as_ref(), &mut refreshed_sources, false)
|
||||||
.await
|
.await
|
||||||
.context("failed to build dependency graph")?;
|
.context("failed to build dependency graph")?;
|
||||||
|
let graph = Arc::new(graph);
|
||||||
|
|
||||||
update_scripts_handle.await??;
|
let bin_folder = bin_dir().await?;
|
||||||
|
|
||||||
let downloaded_graph = {
|
let downloaded_graph = {
|
||||||
let (rx, downloaded_graph) = project
|
let (rx, downloaded_graph) = project
|
||||||
.download_graph(&graph, &mut refreshed_sources, &reqwest, self.prod, true)
|
.download_and_link(
|
||||||
|
&graph,
|
||||||
|
&Arc::new(Mutex::new(refreshed_sources)),
|
||||||
|
&reqwest,
|
||||||
|
self.prod,
|
||||||
|
true,
|
||||||
|
|graph| {
|
||||||
|
let graph = graph.clone();
|
||||||
|
|
||||||
|
async move {
|
||||||
|
try_join_all(
|
||||||
|
graph
|
||||||
|
.values()
|
||||||
|
.flat_map(|versions| versions.values())
|
||||||
|
.filter(|node| node.target.bin_path().is_some())
|
||||||
|
.filter_map(|node| node.node.direct.as_ref())
|
||||||
|
.map(|(alias, _, _)| alias)
|
||||||
|
.filter(|alias| {
|
||||||
|
if *alias == env!("CARGO_BIN_NAME") {
|
||||||
|
tracing::warn!(
|
||||||
|
"package {alias} has the same name as the CLI, skipping bin link"
|
||||||
|
);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
true
|
||||||
|
})
|
||||||
|
.map(|alias| {
|
||||||
|
let bin_folder = bin_folder.clone();
|
||||||
|
async move {
|
||||||
|
let bin_exec_file = bin_folder.join(alias).with_extension(std::env::consts::EXE_EXTENSION);
|
||||||
|
|
||||||
|
let impl_folder = bin_folder.join(".impl");
|
||||||
|
fs::create_dir_all(&impl_folder).await.context("failed to create bin link folder")?;
|
||||||
|
|
||||||
|
let bin_file = impl_folder.join(alias).with_extension("luau");
|
||||||
|
fs::write(&bin_file, bin_link_file(alias))
|
||||||
|
.await
|
||||||
|
.context("failed to write bin link file")?;
|
||||||
|
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
{
|
||||||
|
fs::copy(
|
||||||
|
std::env::current_exe()
|
||||||
|
.context("failed to get current executable path")?,
|
||||||
|
&bin_exec_file,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to copy bin link file")?;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
{
|
+                        fs::write(
+                            &bin_exec_file,
+                            format!(r#"#!/bin/sh
+exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
+                            ),
+                        )
+                        .await
+                        .context("failed to link bin link file")?;
+                    }
+
+                    make_executable(&bin_exec_file).await.context("failed to make bin link file executable")?;
+
+                    Ok::<_, CallbackError>(())
+                }
+            }),
+        )
         .await
         .context("failed to download dependencies")?;
 
             progress_bar(
                 graph.values().map(|versions| versions.len() as u64).sum(),
                 rx,
-                &multi,
                 format!("{} 📥 ", job(3)),
                 "downloading dependencies".to_string(),
                 "downloaded dependencies".to_string(),
             )
             .await?;
 
-            Arc::into_inner(downloaded_graph)
-                .unwrap()
-                .into_inner()
-                .unwrap()
-        };
-
-        let filtered_graph = if self.prod {
             downloaded_graph
-                .clone()
-                .into_iter()
-                .map(|(n, v)| {
-                    (
-                        n,
-                        v.into_iter()
-                            .filter(|(_, n)| n.node.resolved_ty != DependencyType::Dev)
-                            .collect(),
-                    )
-                })
-                .collect()
-        } else {
-            downloaded_graph.clone()
+                .await
+                .context("failed to download & link dependencies")?
         };
 
         #[cfg(feature = "patches")]
         {
             let rx = project
-                .apply_patches(&filtered_graph)
+                .apply_patches(&filter_graph(&downloaded_graph, self.prod))
                 .await
                 .context("failed to apply patches")?;
 
             progress_bar(
                 manifest.patches.values().map(|v| v.len() as u64).sum(),
                 rx,
-                &multi,
-                format!("{} 🩹 ", job(4)),
+                format!("{} 🩹 ", job(JOBS - 1)),
                 "applying patches".to_string(),
                 "applied patches".to_string(),
             )
             .await?;
         }
 
-        println!("{} 🗺️ linking dependencies", job(JOBS - 1));
-
-        let bin_folder = bin_dir().await?;
-
-        try_join_all(
-            filtered_graph
-                .values()
-                .flat_map(|versions| versions.values())
-                .filter(|node| node.target.bin_path().is_some())
-                .filter_map(|node| node.node.direct.as_ref())
-                .map(|(alias, _, _)| alias)
-                .filter(|alias| {
-                    if *alias == env!("CARGO_BIN_NAME") {
-                        log::warn!(
-                            "package {alias} has the same name as the CLI, skipping bin link"
-                        );
-                        return false;
-                    }
-
-                    true
-                })
-                .map(|alias| {
-                    let bin_folder = bin_folder.clone();
-                    async move {
-                        let bin_file = bin_folder.join(alias);
-                        fs::write(&bin_file, bin_link_file(alias))
-                            .await
-                            .context("failed to write bin link file")?;
-
-                        make_executable(&bin_file)
-                            .await
-                            .context("failed to make bin link executable")?;
-
-                        #[cfg(windows)]
-                        {
-                            let bin_file = bin_file.with_extension(std::env::consts::EXE_EXTENSION);
-                            fs::copy(
-                                std::env::current_exe()
-                                    .context("failed to get current executable path")?,
-                                &bin_file,
-                            )
-                            .await
-                            .context("failed to copy bin link file")?;
-                        }
-
-                        Ok::<_, anyhow::Error>(())
-                    }
-                }),
-        )
-        .await?;
-
-        project
-            .link_dependencies(&filtered_graph)
-            .await
-            .context("failed to link dependencies")?;
-
         println!("{} 🧹 finishing up", job(JOBS));
 
         project
@@ -326,9 +315,8 @@ impl InstallCommand {
                 graph: downloaded_graph,
 
                 workspace: run_on_workspace_members(&project, |project| {
-                    let multi = multi.clone();
                     let reqwest = reqwest.clone();
-                    async move { Box::pin(self.run(project, multi, reqwest)).await }
+                    async move { Box::pin(self.run(project, reqwest)).await }
                 })
                 .await?,
             })
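For context (an illustrative sketch, not part of the diff): the callback added above writes one POSIX shim per binary alias, so that running the alias from the bin directory defers to `lune`. A standalone reproduction of just the shim text, with a hypothetical alias value:

fn bin_shim(alias: &str) -> String {
    // Same format! body as the diff: a /bin/sh script that runs the
    // package's linking module through Lune, forwarding all arguments.
    format!(
        r#"#!/bin/sh
exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
    )
}

fn main() {
    // `my-tool` is a made-up alias for illustration.
    println!("{}", bin_shim("my-tool"));
}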
@@ -1,4 +1,3 @@
-use indicatif::MultiProgress;
 use pesde::Project;
 
 mod add;
@@ -72,18 +71,13 @@ pub enum Subcommand {
 }
 
 impl Subcommand {
-    pub async fn run(
-        self,
-        project: Project,
-        multi: MultiProgress,
-        reqwest: reqwest::Client,
-    ) -> anyhow::Result<()> {
+    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
         match self {
             Subcommand::Auth(auth) => auth.run(project, reqwest).await,
             Subcommand::Config(config) => config.run().await,
             Subcommand::Init(init) => init.run(project).await,
             Subcommand::Run(run) => run.run(project).await,
-            Subcommand::Install(install) => install.run(project, multi, reqwest).await,
+            Subcommand::Install(install) => install.run(project, reqwest).await,
             Subcommand::Publish(publish) => publish.run(project, reqwest).await,
             #[cfg(feature = "version-management")]
             Subcommand::SelfInstall(self_install) => self_install.run().await,
@@ -94,9 +88,9 @@ impl Subcommand {
             #[cfg(feature = "version-management")]
             Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await,
             Subcommand::Add(add) => add.run(project).await,
-            Subcommand::Update(update) => update.run(project, multi, reqwest).await,
+            Subcommand::Update(update) => update.run(project, reqwest).await,
             Subcommand::Outdated(outdated) => outdated.run(project).await,
-            Subcommand::Execute(execute) => execute.run(project, multi, reqwest).await,
+            Subcommand::Execute(execute) => execute.run(project, reqwest).await,
         }
     }
 }
@@ -4,11 +4,13 @@ use async_compression::Level;
 use clap::Args;
 use colored::Colorize;
 use fs_err::tokio as fs;
+#[allow(deprecated)]
 use pesde::{
     manifest::{target::Target, DependencyType},
     matching_globs_old_behaviour,
     scripts::ScriptName,
     source::{
+        git_index::GitBasedSource,
         pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
         specifiers::DependencySpecifiers,
         traits::PackageSource,
@@ -68,7 +70,10 @@ impl PublishCommand {
             return Ok(());
         }
 
-        if manifest.target.lib_path().is_none() && manifest.target.bin_path().is_none() {
+        if manifest.target.lib_path().is_none()
+            && manifest.target.bin_path().is_none()
+            && manifest.target.scripts().is_none_or(|s| s.is_empty())
+        {
             anyhow::bail!("no exports found in target");
         }
 
@@ -101,15 +106,21 @@ impl PublishCommand {
             }
         }
 
+        let canonical_package_dir = project
+            .package_dir()
+            .canonicalize()
+            .context("failed to canonicalize package directory")?;
+
         let mut archive = tokio_tar::Builder::new(
             async_compression::tokio::write::GzipEncoder::with_quality(vec![], Level::Best),
         );
 
         let mut display_build_files: Vec<String> = vec![];
 
-        let (lib_path, bin_path, target_kind) = (
+        let (lib_path, bin_path, scripts, target_kind) = (
             manifest.target.lib_path().cloned(),
             manifest.target.bin_path().cloned(),
+            manifest.target.scripts().cloned(),
             manifest.target.kind(),
         );
 
@@ -119,6 +130,7 @@ impl PublishCommand {
             _ => None,
         };
 
+        #[allow(deprecated)]
         let mut paths = matching_globs_old_behaviour(
             project.package_dir(),
             manifest.includes.iter().map(|s| s.as_str()),
@@ -188,21 +200,24 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
                 continue;
             };
 
-            let export_path = relative_export_path
-                .to_path(project.package_dir())
+            let export_path = relative_export_path.to_path(&canonical_package_dir);
+
+            let contents = match fs::read_to_string(&export_path).await {
+                Ok(contents) => contents,
+                Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
+                    anyhow::bail!("{name} does not exist");
+                }
+                Err(e) if e.kind() == std::io::ErrorKind::IsADirectory => {
+                    anyhow::bail!("{name} must point to a file");
+                }
+                Err(e) => {
+                    return Err(e).context(format!("failed to read {name}"));
+                }
+            };
+
+            let export_path = export_path
                 .canonicalize()
                 .context(format!("failed to canonicalize {name}"))?;
-            if !export_path.exists() {
-                anyhow::bail!("{name} points to non-existent file");
-            }
-
-            if !export_path.is_file() {
-                anyhow::bail!("{name} must point to a file");
-            }
-
-            let contents = fs::read_to_string(&export_path)
-                .await
-                .context(format!("failed to read {name}"))?;
 
             if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
                 errs.into_iter()
@@ -223,7 +238,12 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
                 _ => anyhow::bail!("{name} must be within project directory"),
             };
 
-            if paths.insert(PathBuf::from(relative_export_path.as_str())) {
+            if paths.insert(
+                export_path
+                    .strip_prefix(&canonical_package_dir)
+                    .unwrap()
+                    .to_path_buf(),
+            ) {
                 println!(
                     "{}: {name} was not included, adding {relative_export_path}",
                     "warn".yellow().bold()
@@ -270,6 +290,50 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
             }
         }
 
+        if let Some(scripts) = scripts {
+            for (name, path) in scripts {
+                let script_path = path.to_path(&canonical_package_dir);
+
+                let contents = match fs::read_to_string(&script_path).await {
+                    Ok(contents) => contents,
+                    Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
+                        anyhow::bail!("script {name} does not exist");
+                    }
+                    Err(e) if e.kind() == std::io::ErrorKind::IsADirectory => {
+                        anyhow::bail!("script {name} must point to a file");
+                    }
+                    Err(e) => {
+                        return Err(e).context(format!("failed to read script {name}"));
+                    }
+                };
+
+                let script_path = script_path
+                    .canonicalize()
+                    .context(format!("failed to canonicalize script {name}"))?;
+
+                if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
+                    errs.into_iter()
+                        .map(|err| err.to_string())
+                        .collect::<Vec<_>>()
+                        .join(", ")
+                }) {
+                    anyhow::bail!("script {name} is not a valid Luau file: {err}");
+                }
+
+                if paths.insert(
+                    script_path
+                        .strip_prefix(&canonical_package_dir)
+                        .unwrap()
+                        .to_path_buf(),
+                ) {
+                    println!(
+                        "{}: script {name} was not included, adding {path}",
+                        "warn".yellow().bold()
+                    );
+                }
+            }
+        }
+
         for relative_path in &paths {
             let path = project.package_dir().join(relative_path);
 
@@ -301,10 +365,6 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
             }
         }
 
-        #[cfg(feature = "wally-compat")]
-        let mut has_wally = false;
-        let mut has_git = false;
-
         for specifier in manifest
             .dependencies
             .values_mut()
@@ -328,8 +388,6 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
                 }
                 #[cfg(feature = "wally-compat")]
                 DependencySpecifiers::Wally(specifier) => {
-                    has_wally = true;
-
                     let index_name = specifier
                         .index
                         .as_deref()
@@ -345,9 +403,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
                             .to_string(),
                     );
                 }
-                DependencySpecifiers::Git(_) => {
-                    has_git = true;
-                }
+                DependencySpecifiers::Git(_) => {}
                 DependencySpecifiers::Workspace(spec) => {
                     let pkg_ref = WorkspacePackageSource
                         .resolve(spec, project, target_kind, &mut HashSet::new())
@@ -445,6 +501,16 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
                 .bin_path()
                 .map_or("(none)".to_string(), |p| p.to_string())
             );
+            println!(
+                "\tscripts: {}",
+                manifest
+                    .target
+                    .scripts()
+                    .filter(|s| !s.is_empty())
+                    .map_or("(none)".to_string(), |s| {
+                        s.keys().cloned().collect::<Vec<_>>().join(", ")
+                    })
+            );
         }
 
         println!(
@@ -509,8 +575,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
             .get(&self.index)
             .context(format!("missing index {}", self.index))?;
         let source = PesdePackageSource::new(index_url.clone());
-        source
-            .refresh(project)
+        PackageSource::refresh(&source, project)
             .await
             .context("failed to refresh source")?;
         let config = source
@@ -526,15 +591,23 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
             );
         }
 
-        manifest.all_dependencies().context("dependency conflict")?;
+        let deps = manifest.all_dependencies().context("dependency conflict")?;
 
-        if !config.git_allowed && has_git {
-            anyhow::bail!("git dependencies are not allowed on this index");
-        }
-
-        #[cfg(feature = "wally-compat")]
-        if !config.wally_allowed && has_wally {
-            anyhow::bail!("wally dependencies are not allowed on this index");
+        if let Some((disallowed, _)) = deps.iter().find(|(_, (spec, _))| match spec {
+            DependencySpecifiers::Pesde(spec) => {
+                !config.other_registries_allowed.is_allowed_or_same(
+                    source.repo_url().clone(),
+                    gix::Url::try_from(spec.index.as_deref().unwrap()).unwrap(),
+                )
+            }
+            DependencySpecifiers::Git(spec) => !config.git_allowed.is_allowed(spec.repo.clone()),
+            #[cfg(feature = "wally-compat")]
+            DependencySpecifiers::Wally(spec) => !config
+                .wally_allowed
+                .is_allowed(gix::Url::try_from(spec.index.as_deref().unwrap()).unwrap()),
+            _ => false,
+        }) {
+            anyhow::bail!("dependency `{disallowed}` is not allowed on this index");
         }
 
         if self.dry_run {
@@ -553,7 +626,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
             .body(archive);
 
         if let Some(token) = project.auth_config().tokens().get(index_url) {
-            log::debug!("using token for {index_url}");
+            tracing::debug!("using token for {index_url}");
             request = request.header(AUTHORIZATION, token);
         }
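An aside on the publish change above (illustrative only): instead of collecting `has_git`/`has_wally` booleans and checking them against blanket index flags, every dependency specifier is now tested individually against a per-kind allow rule on the index config. A simplified, self-contained model of that shape — the `Allowed` enum and URLs below are hypothetical stand-ins, not pesde's actual `IndexConfig` types:

#[derive(Debug)]
enum Allowed {
    All,
    None,
    Specific(Vec<String>),
}

impl Allowed {
    // Mirrors the is_allowed(...) calls in the diff, reduced to plain strings.
    fn is_allowed(&self, url: &str) -> bool {
        match self {
            Allowed::All => true,
            Allowed::None => false,
            Allowed::Specific(urls) => urls.iter().any(|u| u == url),
        }
    }
}

fn main() {
    let git_allowed = Allowed::Specific(vec!["https://github.com/org/dep".to_string()]);
    assert!(git_allowed.is_allowed("https://github.com/org/dep"));
    assert!(!git_allowed.is_allowed("https://example.com/other"));
    println!("allow-list check behaves as expected");
}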
@@ -1,4 +1,4 @@
-use crate::cli::{repos::update_scripts, up_to_date_lockfile};
+use crate::cli::up_to_date_lockfile;
 use anyhow::Context;
 use clap::Args;
 use futures::{StreamExt, TryStreamExt};
@@ -27,34 +27,29 @@ pub struct RunCommand {
 impl RunCommand {
     pub async fn run(self, project: Project) -> anyhow::Result<()> {
         let run = |root: PathBuf, file_path: PathBuf| {
-            let fut = update_scripts(&project);
-            async move {
-                fut.await.expect("failed to update scripts");
-
-                let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
-                caller
-                    .write_all(
-                        generate_bin_linking_module(
-                            root,
-                            &format!("{:?}", file_path.to_string_lossy()),
-                        )
-                        .as_bytes(),
-                    )
-                    .expect("failed to write to tempfile");
+            let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
+            caller
+                .write_all(
+                    generate_bin_linking_module(
+                        root,
+                        &format!("{:?}", file_path.to_string_lossy()),
+                    )
+                    .as_bytes(),
+                )
+                .expect("failed to write to tempfile");
 
             let status = Command::new("lune")
                 .arg("run")
                 .arg(caller.path())
                 .arg("--")
                 .args(&self.args)
                 .current_dir(current_dir().expect("failed to get current directory"))
                 .status()
                 .expect("failed to run script");
 
             drop(caller);
 
             std::process::exit(status.code().unwrap_or(1))
-            }
         };
 
         let Some(package_or_script) = self.package_or_script else {
@@ -62,8 +57,7 @@ impl RunCommand {
             run(
                 project.package_dir().to_owned(),
                 script_path.to_path(project.package_dir()),
-            )
-            .await;
+            );
             return Ok(());
         }
 
@@ -105,7 +99,7 @@ impl RunCommand {
 
                 let path = bin_path.to_path(&container_folder);
 
-                run(path.clone(), path).await;
+                run(path.clone(), path);
                 return Ok(());
             }
         }
@@ -115,8 +109,7 @@ impl RunCommand {
             run(
                 project.package_dir().to_path_buf(),
                 script_path.to_path(project.package_dir()),
-            )
-            .await;
+            );
             return Ok(());
         }
     };
@@ -132,7 +125,7 @@ impl RunCommand {
             .workspace_dir()
             .unwrap_or_else(|| project.package_dir());
 
-        let members = match project.workspace_members(workspace_dir).await {
+        let members = match project.workspace_members(workspace_dir, false).await {
             Ok(members) => members.boxed(),
             Err(pesde::errors::WorkspaceMembersError::ManifestMissing(e))
                 if e.kind() == std::io::ErrorKind::NotFound =>
@@ -177,7 +170,7 @@ impl RunCommand {
             project.package_dir().to_path_buf()
         };
 
-        run(root, path).await;
+        run(root, path);
 
         Ok(())
     }
@@ -1,7 +1,8 @@
 use crate::cli::{
     config::read_config,
     version::{
-        current_version, get_latest_remote_version, get_or_download_version, update_bin_exe,
+        current_version, get_or_download_version, get_remote_version, no_build_metadata,
+        update_bin_exe, TagInfo, VersionType,
     },
 };
 use anyhow::Context;
@@ -24,33 +25,33 @@ impl SelfUpgradeCommand {
             .context("no cached version found")?
             .1
         } else {
-            get_latest_remote_version(&reqwest).await?
+            get_remote_version(&reqwest, VersionType::Latest).await?
         };
 
-        if latest_version <= current_version() {
+        let latest_version_no_metadata = no_build_metadata(&latest_version);
+
+        if latest_version_no_metadata <= current_version() {
             println!("already up to date");
             return Ok(());
         }
 
+        let display_latest_version = latest_version_no_metadata.to_string().yellow().bold();
+
         if !inquire::prompt_confirmation(format!(
-            "are you sure you want to upgrade {} from {} to {}?",
+            "are you sure you want to upgrade {} from {} to {display_latest_version}?",
             env!("CARGO_BIN_NAME").cyan(),
-            current_version().to_string().yellow().bold(),
-            latest_version.to_string().yellow().bold()
+            env!("CARGO_PKG_VERSION").yellow().bold()
         ))? {
             println!("cancelled upgrade");
             return Ok(());
         }
 
-        let path = get_or_download_version(&reqwest, &latest_version, true)
+        let path = get_or_download_version(&reqwest, &TagInfo::Complete(latest_version), true)
             .await?
             .unwrap();
         update_bin_exe(&path).await?;
 
-        println!(
-            "upgraded to version {}!",
-            latest_version.to_string().yellow().bold()
-        );
+        println!("upgraded to version {display_latest_version}!");
 
         Ok(())
     }
@@ -1,21 +1,16 @@
-use crate::cli::{progress_bar, repos::update_scripts, run_on_workspace_members};
+use crate::cli::{progress_bar, run_on_workspace_members};
 use anyhow::Context;
 use clap::Args;
 use colored::Colorize;
-use indicatif::MultiProgress;
 use pesde::{lockfile::Lockfile, Project};
 use std::{collections::HashSet, sync::Arc};
+use tokio::sync::Mutex;
 
 #[derive(Debug, Args, Copy, Clone)]
 pub struct UpdateCommand {}
 
 impl UpdateCommand {
-    pub async fn run(
-        self,
-        project: Project,
-        multi: MultiProgress,
-        reqwest: reqwest::Client,
-    ) -> anyhow::Result<()> {
+    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
         let mut refreshed_sources = HashSet::new();
 
         let manifest = project
@@ -34,8 +29,7 @@ impl UpdateCommand {
             .dependency_graph(None, &mut refreshed_sources, false)
             .await
             .context("failed to build dependency graph")?;
-        update_scripts(&project).await?;
+        let graph = Arc::new(graph);
 
         project
             .write_lockfile(Lockfile {
@@ -46,30 +40,34 @@ impl UpdateCommand {
 
                 graph: {
                     let (rx, downloaded_graph) = project
-                        .download_graph(&graph, &mut refreshed_sources, &reqwest, false, false)
+                        .download_and_link(
+                            &graph,
+                            &Arc::new(Mutex::new(refreshed_sources)),
+                            &reqwest,
+                            false,
+                            false,
+                            |_| async { Ok::<_, std::io::Error>(()) },
+                        )
                         .await
                         .context("failed to download dependencies")?;
 
                     progress_bar(
                         graph.values().map(|versions| versions.len() as u64).sum(),
                         rx,
-                        &multi,
                         "📥 ".to_string(),
                         "downloading dependencies".to_string(),
                         "downloaded dependencies".to_string(),
                     )
                     .await?;
 
-                    Arc::into_inner(downloaded_graph)
-                        .unwrap()
-                        .into_inner()
-                        .unwrap()
+                    downloaded_graph
+                        .await
+                        .context("failed to download dependencies")?
                 },
 
                 workspace: run_on_workspace_members(&project, |project| {
-                    let multi = multi.clone();
                     let reqwest = reqwest.clone();
-                    async move { Box::pin(self.run(project, multi, reqwest)).await }
+                    async move { Box::pin(self.run(project, reqwest)).await }
                 })
                 .await?,
             })
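One detail worth noting in the hunk above (an illustration, not part of the diff): `update` has no per-package work to do, so it hands `download_and_link` a no-op callback, and the `Ok::<_, std::io::Error>(())` turbofish is what pins the error type that an otherwise-empty async block gives the compiler no way to infer. The same pattern in isolation, with a hypothetical `run_with_callback` helper:

use std::future::Future;

// A generic driver shaped like download_and_link's callback parameter.
async fn run_with_callback<F, Fut, E>(cb: F) -> Result<(), E>
where
    F: FnOnce(u32) -> Fut,
    Fut: Future<Output = Result<(), E>>,
{
    cb(42).await
}

#[tokio::main] // assumes the tokio runtime, which this codebase already uses
async fn main() {
    // Without the turbofish the error type of the bare `Ok(())` is ambiguous,
    // since the closure body never produces an error value.
    run_with_callback(|_| async { Ok::<_, std::io::Error>(()) })
        .await
        .unwrap();
    println!("no-op callback ran");
}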
@@ -2,19 +2,16 @@ use crate::cli::{auth::Tokens, home_dir};
 use anyhow::Context;
 use fs_err::tokio as fs;
 use serde::{Deserialize, Serialize};
+use tracing::instrument;
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(default)]
 pub struct CliConfig {
     #[serde(
         serialize_with = "crate::util::serialize_gix_url",
         deserialize_with = "crate::util::deserialize_gix_url"
     )]
     pub default_index: gix::Url,
-    #[serde(
-        serialize_with = "crate::util::serialize_gix_url",
-        deserialize_with = "crate::util::deserialize_gix_url"
-    )]
-    pub scripts_repo: gix::Url,
-
     pub tokens: Tokens,
 
@@ -26,7 +23,6 @@ impl Default for CliConfig {
     fn default() -> Self {
         Self {
             default_index: "https://github.com/pesde-pkg/index".try_into().unwrap(),
-            scripts_repo: "https://github.com/pesde-pkg/scripts".try_into().unwrap(),
 
             tokens: Tokens(Default::default()),
 
@@ -35,6 +31,7 @@ impl Default for CliConfig {
     }
 }
 
+#[instrument(level = "trace")]
 pub async fn read_config() -> anyhow::Result<CliConfig> {
     let config_string = match fs::read_to_string(home_dir()?.join("config.toml")).await {
         Ok(config_string) => config_string,
@@ -49,6 +46,7 @@ pub async fn read_config() -> anyhow::Result<CliConfig> {
     Ok(config)
 }
 
+#[instrument(level = "trace")]
 pub async fn write_config(config: &CliConfig) -> anyhow::Result<()> {
     let config_string = toml::to_string(config).context("failed to serialize config")?;
     fs::write(home_dir()?.join("config.toml"), config_string)
@@ -2,7 +2,6 @@ use anyhow::Context;
 use colored::Colorize;
 use fs_err::tokio as fs;
 use futures::StreamExt;
-use indicatif::MultiProgress;
 use pesde::{
     lockfile::Lockfile,
     manifest::target::TargetKind,
@@ -19,12 +18,12 @@ use std::{
     time::Duration,
 };
 use tokio::pin;
+use tracing::instrument;
 
 pub mod auth;
 pub mod commands;
 pub mod config;
 pub mod files;
-pub mod repos;
 #[cfg(feature = "version-management")]
 pub mod version;
 
@@ -44,6 +43,7 @@ pub async fn bin_dir() -> anyhow::Result<PathBuf> {
     Ok(bin_dir)
 }
 
+#[instrument(skip(project), ret(level = "trace"), level = "debug")]
 pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
     let manifest = project.deser_manifest().await?;
     let lockfile = match project.deser_lockfile().await {
@@ -57,17 +57,17 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
     };
 
     if manifest.overrides != lockfile.overrides {
-        log::debug!("overrides are different");
+        tracing::debug!("overrides are different");
         return Ok(None);
     }
 
     if manifest.target.kind() != lockfile.target {
-        log::debug!("target kind is different");
+        tracing::debug!("target kind is different");
        return Ok(None);
     }
 
     if manifest.name != lockfile.name || manifest.version != lockfile.version {
-        log::debug!("name or version is different");
+        tracing::debug!("name or version is different");
         return Ok(None);
     }
 
@@ -89,7 +89,7 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
         .iter()
         .all(|(_, (spec, ty))| specs.contains(&(spec, ty)));
 
-    log::debug!("dependencies are the same: {same_dependencies}");
+    tracing::debug!("dependencies are the same: {same_dependencies}");
 
     Ok(if same_dependencies {
         Some(lockfile)
@@ -134,7 +134,7 @@ impl VersionedPackageName {
         let versions = graph.get(&self.0).context("package not found in graph")?;
         if versions.len() == 1 {
             let version = versions.keys().next().unwrap().clone();
-            log::debug!("only one version found, using {version}");
+            tracing::debug!("only one version found, using {version}");
             version
         } else {
             anyhow::bail!(
@@ -196,21 +196,18 @@ pub fn parse_gix_url(s: &str) -> Result<gix::Url, gix::url::parse::Error> {
 pub async fn progress_bar<E: std::error::Error + Into<anyhow::Error>>(
     len: u64,
     mut rx: tokio::sync::mpsc::Receiver<Result<String, E>>,
-    multi: &MultiProgress,
     prefix: String,
     progress_msg: String,
     finish_msg: String,
 ) -> anyhow::Result<()> {
-    let bar = multi.add(
-        indicatif::ProgressBar::new(len)
-            .with_style(
-                indicatif::ProgressStyle::default_bar()
-                    .template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
-                    .progress_chars("█▓▒░ "),
-            )
-            .with_prefix(prefix)
-            .with_message(progress_msg),
-    );
+    let bar = indicatif::ProgressBar::new(len)
+        .with_style(
+            indicatif::ProgressStyle::default_bar()
+                .template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
+                .progress_chars("█▓▒░ "),
+        )
+        .with_prefix(prefix)
+        .with_message(progress_msg);
     bar.enable_steady_tick(Duration::from_millis(100));
 
     while let Some(result) = rx.recv().await {
@@ -249,7 +246,9 @@ pub async fn run_on_workspace_members<F: Future<Output = anyhow::Result<()>>>(
         return Ok(Default::default());
     }
 
-    let members_future = project.workspace_members(project.package_dir()).await?;
+    let members_future = project
+        .workspace_members(project.package_dir(), true)
+        .await?;
     pin!(members_future);
 
     let mut results = BTreeMap::<PackageName, BTreeMap<TargetKind, RelativePathBuf>>::new();
@@ -258,7 +257,10 @@ pub async fn run_on_workspace_members<F: Future<Output = anyhow::Result<()>>>(
         let relative_path =
             RelativePathBuf::from_path(path.strip_prefix(project.package_dir()).unwrap()).unwrap();
 
-        f(shift_project_dir(project, path)).await?;
+        // don't run on the current workspace root
+        if relative_path != "" {
+            f(shift_project_dir(project, path)).await?;
+        }
 
         results
             .entry(manifest.name)
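With the `MultiProgress` handle removed above, `progress_bar` now builds a free-standing `indicatif` bar. As an illustration (not part of the diff), a trimmed, runnable sketch of the same construction — this assumes indicatif 0.17, whose `template` returns a `Result`; the template string and tick interval are copied from the diff, the loop is synthetic:

use std::time::Duration;

fn main() -> Result<(), indicatif::style::TemplateError> {
    let bar = indicatif::ProgressBar::new(10)
        .with_style(
            indicatif::ProgressStyle::default_bar()
                .template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
                .progress_chars("█▓▒░ "),
        )
        .with_prefix("📥 ")
        .with_message("downloading dependencies");
    bar.enable_steady_tick(Duration::from_millis(100));

    for _ in 0..10 {
        // Stand-in for receiving completion messages over the mpsc channel.
        std::thread::sleep(Duration::from_millis(50));
        bar.inc(1);
    }
    bar.finish_with_message("downloaded dependencies");
    Ok(())
}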
src/cli/repos.rs (143 deletions)
@@ -1,143 +0,0 @@
-use crate::{
-    cli::{config::read_config, home_dir},
-    util::authenticate_conn,
-};
-use anyhow::Context;
-use fs_err::tokio as fs;
-use gix::remote::{fetch::Shallow, Direction};
-use pesde::Project;
-use std::{path::Path, sync::atomic::AtomicBool};
-use tokio::{runtime::Handle, task::spawn_blocking};
-
-async fn update_repo<P: AsRef<Path>>(
-    name: &str,
-    path: P,
-    url: gix::Url,
-    project: &Project,
-) -> anyhow::Result<()> {
-    let path = path.as_ref();
-    let should_update = path.exists();
-
-    let (repo, oid) = if should_update {
-        let repo = gix::open(path).context(format!("failed to open {name} repository"))?;
-
-        let remote = repo
-            .find_default_remote(Direction::Fetch)
-            .context(format!("missing default remote of {name} repository"))?
-            .context(format!(
-                "failed to find default remote of {name} repository"
-            ))?;
-
-        let mut connection = remote.connect(Direction::Fetch).context(format!(
-            "failed to connect to default remote of {name} repository"
-        ))?;
-
-        authenticate_conn(&mut connection, project.auth_config());
-
-        let results = connection
-            .prepare_fetch(gix::progress::Discard, Default::default())
-            .context(format!("failed to prepare {name} repository fetch"))?
-            .with_shallow(Shallow::Deepen(1))
-            .receive(gix::progress::Discard, &false.into())
-            .context(format!("failed to receive new {name} repository contents"))?;
-
-        let remote_ref = results
-            .ref_map
-            .remote_refs
-            .first()
-            .context(format!("failed to get remote refs of {name} repository"))?;
-
-        let unpacked = remote_ref.unpack();
-        let oid = unpacked
-            .1
-            .or(unpacked.2)
-            .context("couldn't find oid in remote ref")?;
-
-        (repo, gix::ObjectId::from(oid))
-    } else {
-        fs::create_dir_all(path)
-            .await
-            .context(format!("failed to create {name} directory"))?;
-
-        let repo = gix::prepare_clone(url, path)
-            .context(format!("failed to prepare {name} repository clone"))?
-            .with_shallow(Shallow::Deepen(1))
-            .fetch_only(gix::progress::Discard, &false.into())
-            .context(format!("failed to fetch and checkout {name} repository"))?
-            .0;
-
-        let oid = {
-            let mut head = repo
-                .head()
-                .context(format!("failed to get {name} repository head"))?;
-            let obj = head
-                .peel_to_object_in_place()
-                .context(format!("failed to peel {name} repository head to object"))?;
-
-            obj.id
-        };
-
-        (repo, oid)
-    };
-
-    let tree = repo
-        .find_object(oid)
-        .context(format!("failed to find {name} repository tree"))?
-        .peel_to_tree()
-        .context(format!("failed to peel {name} repository object to tree"))?;
-
-    let mut index = gix::index::File::from_state(
-        gix::index::State::from_tree(&tree.id, &repo.objects, Default::default()).context(
-            format!("failed to create index state from {name} repository tree"),
-        )?,
-        repo.index_path(),
-    );
-
-    let opts = gix::worktree::state::checkout::Options {
-        overwrite_existing: true,
-        destination_is_initially_empty: !should_update,
-        ..Default::default()
-    };
-
-    gix::worktree::state::checkout(
-        &mut index,
-        repo.work_dir().context(format!("{name} repo is bare"))?,
-        repo.objects
-            .clone()
-            .into_arc()
-            .context("failed to clone objects")?,
-        &gix::progress::Discard,
-        &gix::progress::Discard,
-        &false.into(),
-        opts,
-    )
-    .context(format!("failed to checkout {name} repository"))?;
-
-    index
-        .write(gix::index::write::Options::default())
-        .context("failed to write index")
-}
-
-static SCRIPTS_UPDATED: AtomicBool = AtomicBool::new(false);
-
-pub async fn update_scripts(project: &Project) -> anyhow::Result<()> {
-    if SCRIPTS_UPDATED.swap(true, std::sync::atomic::Ordering::Relaxed) {
-        return Ok(());
-    }
-
-    let home_dir = home_dir()?;
-    let config = read_config().await?;
-
-    let project = project.clone();
-    spawn_blocking(move || {
-        Handle::current().block_on(update_repo(
-            "scripts",
-            home_dir.join("scripts"),
-            config.scripts_repo,
-            &project,
-        ))
-    })
-    .await??;
-
-    Ok(())
-}
@@ -15,7 +15,8 @@ use std::{
     env::current_exe,
     path::{Path, PathBuf},
 };
-use tokio::io::AsyncReadExt;
+use tokio::io::AsyncWrite;
+use tracing::instrument;
 
 pub fn current_version() -> Version {
     Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
@@ -33,18 +34,33 @@ struct Asset {
     url: url::Url,
 }
 
+#[instrument(level = "trace")]
 fn get_repo() -> (String, String) {
     let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
-    (
+    let (owner, repo) = (
         parts.next().unwrap().to_string(),
         parts.next().unwrap().to_string(),
-    )
+    );
+
+    tracing::trace!("repository for updates: {owner}/{repo}");
+
+    (owner, repo)
 }
 
-pub async fn get_latest_remote_version(reqwest: &reqwest::Client) -> anyhow::Result<Version> {
+#[derive(Debug)]
+pub enum VersionType {
+    Latest,
+    Specific(Version),
+}
+
+#[instrument(skip(reqwest), level = "trace")]
+pub async fn get_remote_version(
+    reqwest: &reqwest::Client,
+    ty: VersionType,
+) -> anyhow::Result<Version> {
     let (owner, repo) = get_repo();
 
-    let releases = reqwest
+    let mut releases = reqwest
         .get(format!(
             "https://api.github.com/repos/{owner}/{repo}/releases",
         ))
@@ -55,17 +71,28 @@ pub async fn get_latest_remote_version(reqwest: &reqwest::Client) -> anyhow::Res
         .context("failed to get GitHub API response")?
         .json::<Vec<Release>>()
         .await
-        .context("failed to parse GitHub API response")?;
-
-    releases
+        .context("failed to parse GitHub API response")?
         .into_iter()
-        .map(|release| Version::parse(release.tag_name.trim_start_matches('v')).unwrap())
-        .max()
-        .context("failed to find latest version")
+        .filter_map(|release| Version::parse(release.tag_name.trim_start_matches('v')).ok());
+
+    match ty {
+        VersionType::Latest => releases.max(),
+        VersionType::Specific(version) => {
+            releases.find(|v| no_build_metadata(v) == no_build_metadata(&version))
+        }
+    }
+    .context("failed to find latest version")
+}
+
+pub fn no_build_metadata(version: &Version) -> Version {
+    let mut version = version.clone();
+    version.build = semver::BuildMetadata::EMPTY;
+    version
 }
 
 const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
 
+#[instrument(skip(reqwest), level = "trace")]
 pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
     let config = read_config().await?;
 
@@ -73,9 +100,11 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
         .last_checked_updates
         .filter(|(time, _)| chrono::Utc::now() - *time < CHECK_INTERVAL)
     {
+        tracing::debug!("using cached version");
         version
     } else {
-        let version = get_latest_remote_version(reqwest).await?;
+        tracing::debug!("checking for updates");
+        let version = get_remote_version(reqwest, VersionType::Latest).await?;
 
         write_config(&CliConfig {
             last_checked_updates: Some((chrono::Utc::now(), version.clone())),
@@ -86,72 +115,77 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
         version
     };
     let current_version = current_version();
+    let version_no_metadata = no_build_metadata(&version);
 
-    if version > current_version {
-        let name = env!("CARGO_BIN_NAME");
-        let changelog = format!("{}/releases/tag/v{version}", env!("CARGO_PKG_REPOSITORY"),);
-
-        let unformatted_messages = [
-            "".to_string(),
-            format!("update available! {current_version} → {version}"),
-            format!("changelog: {changelog}"),
-            format!("run `{name} self-upgrade` to upgrade"),
-            "".to_string(),
-        ];
-
-        let width = unformatted_messages
-            .iter()
-            .map(|s| s.chars().count())
-            .max()
-            .unwrap()
-            + 4;
-
-        let column = "│".bright_magenta();
-
-        let message = [
-            "".to_string(),
-            format!(
-                "update available! {} → {}",
-                current_version.to_string().red(),
-                version.to_string().green()
-            ),
-            format!("changelog: {}", changelog.blue()),
-            format!(
-                "run `{} {}` to upgrade",
-                name.blue(),
-                "self-upgrade".yellow()
-            ),
-            "".to_string(),
-        ]
-        .into_iter()
-        .enumerate()
-        .map(|(i, s)| {
-            let text_length = unformatted_messages[i].chars().count();
-            let padding = (width as f32 - text_length as f32) / 2f32;
-            let padding_l = " ".repeat(padding.floor() as usize);
-            let padding_r = " ".repeat(padding.ceil() as usize);
-            format!("{column}{padding_l}{s}{padding_r}{column}")
-        })
-        .collect::<Vec<_>>()
-        .join("\n");
-
-        let lines = "─".repeat(width).bright_magenta();
-
-        let tl = "╭".bright_magenta();
-        let tr = "╮".bright_magenta();
-        let bl = "╰".bright_magenta();
-        let br = "╯".bright_magenta();
-
-        println!("\n{tl}{lines}{tr}\n{message}\n{bl}{lines}{br}\n");
+    if version_no_metadata <= current_version {
+        return Ok(());
     }
 
+    let name = env!("CARGO_BIN_NAME");
+    let changelog = format!("{}/releases/tag/v{version}", env!("CARGO_PKG_REPOSITORY"));
+
+    let unformatted_messages = [
+        "".to_string(),
+        format!("update available! {current_version} → {version_no_metadata}"),
+        format!("changelog: {changelog}"),
+        format!("run `{name} self-upgrade` to upgrade"),
+        "".to_string(),
+    ];
+
+    let width = unformatted_messages
+        .iter()
+        .map(|s| s.chars().count())
+        .max()
+        .unwrap()
+        + 4;
+
+    let column = "│".bright_magenta();
+
+    let message = [
+        "".to_string(),
+        format!(
+            "update available! {} → {}",
+            current_version.to_string().red(),
+            version_no_metadata.to_string().green()
+        ),
+        format!("changelog: {}", changelog.blue()),
+        format!(
+            "run `{} {}` to upgrade",
+            name.blue(),
+            "self-upgrade".yellow()
+        ),
+        "".to_string(),
+    ]
+    .into_iter()
+    .enumerate()
+    .map(|(i, s)| {
+        let text_length = unformatted_messages[i].chars().count();
+        let padding = (width as f32 - text_length as f32) / 2f32;
+        let padding_l = " ".repeat(padding.floor() as usize);
+        let padding_r = " ".repeat(padding.ceil() as usize);
+        format!("{column}{padding_l}{s}{padding_r}{column}")
+    })
+    .collect::<Vec<_>>()
+    .join("\n");
+
+    let lines = "─".repeat(width).bright_magenta();
+
+    let tl = "╭".bright_magenta();
+    let tr = "╮".bright_magenta();
+    let bl = "╰".bright_magenta();
+    let br = "╯".bright_magenta();
+
+    println!("\n{tl}{lines}{tr}\n{message}\n{bl}{lines}{br}\n");
+
     Ok(())
 }
 
-pub async fn download_github_release(
+#[instrument(skip(reqwest, writer), level = "trace")]
+pub async fn download_github_release<W: AsyncWrite + Unpin>(
     reqwest: &reqwest::Client,
     version: &Version,
-) -> anyhow::Result<Vec<u8>> {
+    mut writer: W,
+) -> anyhow::Result<()> {
     let (owner, repo) = get_repo();
 
     let release = reqwest
@@ -202,19 +236,22 @@ pub async fn download_github_release(
         .context("archive has no entry")?
         .context("failed to get first archive entry")?;
 
-    let mut result = Vec::new();
-
-    entry
-        .read_to_end(&mut result)
+    tokio::io::copy(&mut entry, &mut writer)
         .await
-        .context("failed to read archive entry bytes")?;
-
-    Ok(result)
+        .context("failed to write archive entry to file")
+        .map(|_| ())
 }
 
+#[derive(Debug)]
+pub enum TagInfo {
+    Complete(Version),
+    Incomplete(Version),
+}
+
+#[instrument(skip(reqwest), level = "trace")]
 pub async fn get_or_download_version(
     reqwest: &reqwest::Client,
-    version: &Version,
+    tag: &TagInfo,
     always_give_path: bool,
 ) -> anyhow::Result<Option<PathBuf>> {
     let path = home_dir()?.join("versions");
@@ -222,11 +259,23 @@ pub async fn get_or_download_version(
         .await
         .context("failed to create versions directory")?;
 
-    let path = path.join(format!("{version}{}", std::env::consts::EXE_SUFFIX));
+    let version = match tag {
+        TagInfo::Complete(version) => version,
+        // don't fetch the version since it could be cached
+        TagInfo::Incomplete(version) => version,
+    };
+
+    let path = path.join(format!(
+        "{}{}",
+        no_build_metadata(version),
+        std::env::consts::EXE_SUFFIX
+    ));
 
     let is_requested_version = !always_give_path && *version == current_version();
 
     if path.exists() {
+        tracing::debug!("version already exists");
+
         return Ok(if is_requested_version {
             None
         } else {
@@ -235,14 +284,29 @@ pub async fn get_or_download_version(
     }
 
     if is_requested_version {
+        tracing::debug!("copying current executable to version directory");
         fs::copy(current_exe()?, &path)
            .await
            .context("failed to copy current executable to version directory")?;
     } else {
-        let bytes = download_github_release(reqwest, version).await?;
-        fs::write(&path, bytes)
-            .await
-            .context("failed to write downloaded version file")?;
+        let version = match tag {
+            TagInfo::Complete(version) => version.clone(),
+            TagInfo::Incomplete(version) => {
+                get_remote_version(reqwest, VersionType::Specific(version.clone()))
+                    .await
+                    .context("failed to get remote version")?
+            }
+        };
+
+        tracing::debug!("downloading version");
+        download_github_release(
+            reqwest,
+            &version,
+            fs::File::create(&path)
+                .await
+                .context("failed to create version file")?,
+        )
+        .await?;
     }
 
     make_executable(&path)
@@ -256,6 +320,7 @@ pub async fn get_or_download_version(
     })
 }
 
+#[instrument(level = "trace")]
 pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
     let bin_exe_path = bin_dir().await?.join(format!(
         "{}{}",
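The version handling above revolves around stripping semver build metadata: debug builds carry a `+rev.g<sha>` suffix, and the `semver` crate's total order uses build metadata as a final tiebreaker, so `0.5.0+rev.gabc1234` would otherwise compare greater than `0.5.0` and the "already up to date" check would misfire. A self-contained check of the helper (the version strings below are made up for illustration):

use semver::{BuildMetadata, Version};

// Same body as the no_build_metadata added in the diff.
fn no_build_metadata(version: &Version) -> Version {
    let mut version = version.clone();
    version.build = BuildMetadata::EMPTY;
    version
}

fn main() {
    let tagged = Version::parse("0.5.0+rev.gabc1234").unwrap();
    let plain = Version::parse("0.5.0").unwrap();

    // The semver crate's Ord treats build metadata as a final tiebreaker...
    assert!(tagged > plain);
    // ...which is exactly why comparisons here strip it first.
    assert_eq!(no_build_metadata(&tagged), plain);
    println!("build metadata stripped correctly");
}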
127  src/download.rs

@@ -13,16 +13,18 @@ use std::{
 	collections::HashSet,
 	sync::{Arc, Mutex},
 };
+use tracing::{instrument, Instrument};

 type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>;

-type MultithreadDownloadJob = (
+pub(crate) type MultithreadDownloadJob = (
 	tokio::sync::mpsc::Receiver<Result<String, errors::DownloadGraphError>>,
 	MultithreadedGraph,
 );

 impl Project {
 	/// Downloads a graph of dependencies
+	#[instrument(skip(self, graph, refreshed_sources, reqwest), level = "debug")]
 	pub async fn download_graph(
 		&self,
 		graph: &DependencyGraph,
@@ -30,6 +32,7 @@ impl Project {
 		reqwest: &reqwest::Client,
 		prod: bool,
 		write: bool,
+		wally: bool,
 	) -> Result<MultithreadDownloadJob, errors::DownloadGraphError> {
 		let manifest = self.deser_manifest().await?;
 		let manifest_target_kind = manifest.target.kind();
@@ -53,84 +56,102 @@ impl Project {
 		)
 		.await?;

+		let project = Arc::new(self.clone());
+
 		for (name, versions) in graph {
 			for (version_id, node) in versions {
+				// we need to download pesde packages first, since scripts (for target finding for example) can depend on them
+				if node.pkg_ref.like_wally() != wally {
+					continue;
+				}
+
 				let tx = tx.clone();

 				let name = name.clone();
 				let version_id = version_id.clone();
 				let node = node.clone();

-				let project = Arc::new(self.clone());
+				let span = tracing::info_span!(
+					"download",
+					name = name.to_string(),
+					version_id = version_id.to_string()
+				);
+
+				let project = project.clone();
 				let reqwest = reqwest.clone();
 				let downloaded_graph = downloaded_graph.clone();

 				let package_dir = self.package_dir().to_path_buf();

-				tokio::spawn(async move {
-					let source = node.pkg_ref.source();
+				tokio::spawn(
+					async move {
+						let source = node.pkg_ref.source();

 						let container_folder = node.container_folder(
 							&package_dir
 								.join(manifest_target_kind.packages_folder(version_id.target()))
 								.join(PACKAGES_CONTAINER_NAME),
 							&name,
 							version_id.version(),
 						);

 						match fs::create_dir_all(&container_folder).await {
 							Ok(_) => {}
 							Err(e) => {
 								tx.send(Err(errors::DownloadGraphError::Io(e)))
 									.await
 									.unwrap();
 								return;
 							}
 						}

 						let project = project.clone();

-					log::debug!("downloading {name}@{version_id}");
+						tracing::debug!("downloading");

 						let (fs, target) =
 							match source.download(&node.pkg_ref, &project, &reqwest).await {
 								Ok(target) => target,
 								Err(e) => {
 									tx.send(Err(Box::new(e).into())).await.unwrap();
 									return;
 								}
 							};

-					log::debug!("downloaded {name}@{version_id}");
+						tracing::debug!("downloaded");

 						if write {
 							if !prod || node.resolved_ty != DependencyType::Dev {
 								match fs.write_to(container_folder, project.cas_dir(), true).await {
 									Ok(_) => {}
 									Err(e) => {
 										tx.send(Err(errors::DownloadGraphError::WriteFailed(e)))
 											.await
 											.unwrap();
 										return;
 									}
 								};
 							} else {
-							log::debug!("skipping writing {name}@{version_id} to disk, dev dependency in prod mode");
+								tracing::debug!(
+									"skipping write to disk, dev dependency in prod mode"
+								);
 							}
 						}

 						let display_name = format!("{name}@{version_id}");

 						{
 							let mut downloaded_graph = downloaded_graph.lock().unwrap();
 							downloaded_graph
 								.entry(name)
 								.or_default()
 								.insert(version_id, DownloadedDependencyGraphNode { node, target });
 						}

 						tx.send(Ok(display_name)).await.unwrap();
-				});
+					}
+					.instrument(span),
+				);
 			}
 		}
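The new `wally` flag splits downloading into two passes, so `download_graph` is now called once per package kind. A minimal sketch of draining a single pesde-only pass; `project`, `graph`, `refreshed_sources`, and `reqwest` are assumed to exist in the caller and are not part of the diff.

// One pesde-only pass (wally = false). Worker errors arrive over the
// channel rather than as a single return value.
let (mut rx, _downloaded_graph) = project
    .download_graph(
        &graph,
        &mut refreshed_sources,
        &reqwest,
        false, // prod
        true,  // write
        false, // wally
    )
    .await?;

while let Some(result) = rx.recv().await {
    // Each Ok message is a "name@version_id" display string.
    tracing::info!("downloaded {}", result?);
}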
176  src/download_and_link.rs  (new file)

@@ -0,0 +1,176 @@
+use crate::{
+	lockfile::{DependencyGraph, DownloadedGraph},
+	manifest::DependencyType,
+	source::PackageSources,
+	Project,
+};
+use futures::FutureExt;
+use std::{
+	collections::HashSet,
+	future::Future,
+	sync::{Arc, Mutex as StdMutex},
+};
+use tokio::sync::Mutex;
+use tracing::{instrument, Instrument};
+
+/// Filters a graph to only include production dependencies, if `prod` is `true`
+pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph {
+	if !prod {
+		return graph.clone();
+	}
+
+	graph
+		.iter()
+		.map(|(name, versions)| {
+			(
+				name.clone(),
+				versions
+					.iter()
+					.filter(|(_, node)| node.node.resolved_ty != DependencyType::Dev)
+					.map(|(v_id, node)| (v_id.clone(), node.clone()))
+					.collect(),
+			)
+		})
+		.collect()
+}
+
+/// Receiver for dependencies downloaded and linked
+pub type DownloadAndLinkReceiver =
+	tokio::sync::mpsc::Receiver<Result<String, crate::download::errors::DownloadGraphError>>;
+
+impl Project {
+	/// Downloads a graph of dependencies and links them in the correct order
+	#[instrument(
+		skip(self, graph, refreshed_sources, reqwest, pesde_cb),
+		level = "debug"
+	)]
+	pub async fn download_and_link<
+		F: FnOnce(&Arc<DownloadedGraph>) -> R + Send + 'static,
+		R: Future<Output = Result<(), E>> + Send,
+		E: Send + Sync + 'static,
+	>(
+		&self,
+		graph: &Arc<DependencyGraph>,
+		refreshed_sources: &Arc<Mutex<HashSet<PackageSources>>>,
+		reqwest: &reqwest::Client,
+		prod: bool,
+		write: bool,
+		pesde_cb: F,
+	) -> Result<
+		(
+			DownloadAndLinkReceiver,
+			impl Future<Output = Result<DownloadedGraph, errors::DownloadAndLinkError<E>>>,
+		),
+		errors::DownloadAndLinkError<E>,
+	> {
+		let (tx, rx) = tokio::sync::mpsc::channel(
+			graph
+				.iter()
+				.map(|(_, versions)| versions.len())
+				.sum::<usize>()
+				.max(1),
+		);
+		let downloaded_graph = Arc::new(StdMutex::new(DownloadedGraph::default()));
+
+		let this = self.clone();
+		let graph = graph.clone();
+		let reqwest = reqwest.clone();
+		let refreshed_sources = refreshed_sources.clone();
+
+		Ok((
+			rx,
+			tokio::spawn(async move {
+				let mut refreshed_sources = refreshed_sources.lock().await;
+
+				// step 1. download pesde dependencies
+				let (mut pesde_rx, pesde_graph) = this
+					.download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, false)
+					.instrument(tracing::debug_span!("download (pesde)"))
+					.await?;
+
+				while let Some(result) = pesde_rx.recv().await {
+					tx.send(result).await.unwrap();
+				}
+
+				let pesde_graph = Arc::into_inner(pesde_graph).unwrap().into_inner().unwrap();
+
+				// step 2. link pesde dependencies. do so without types
+				if write {
+					this.link_dependencies(&filter_graph(&pesde_graph, prod), false)
+						.instrument(tracing::debug_span!("link (pesde)"))
+						.await?;
+				}
+
+				let pesde_graph = Arc::new(pesde_graph);
+
+				pesde_cb(&pesde_graph)
+					.await
+					.map_err(errors::DownloadAndLinkError::PesdeCallback)?;
+
+				let pesde_graph = Arc::into_inner(pesde_graph).unwrap();
+
+				// step 3. download wally dependencies
+				let (mut wally_rx, wally_graph) = this
+					.download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, true)
+					.instrument(tracing::debug_span!("download (wally)"))
+					.await?;
+
+				while let Some(result) = wally_rx.recv().await {
+					tx.send(result).await.unwrap();
+				}
+
+				let wally_graph = Arc::into_inner(wally_graph).unwrap().into_inner().unwrap();
+
+				{
+					let mut downloaded_graph = downloaded_graph.lock().unwrap();
+					downloaded_graph.extend(pesde_graph);
+					for (name, versions) in wally_graph {
+						for (version_id, node) in versions {
+							downloaded_graph
+								.entry(name.clone())
+								.or_default()
+								.insert(version_id, node);
+						}
+					}
+				}
+
+				let graph = Arc::into_inner(downloaded_graph)
+					.unwrap()
+					.into_inner()
+					.unwrap();
+
+				// step 4. link ALL dependencies. do so with types
+				if write {
+					this.link_dependencies(&filter_graph(&graph, prod), true)
+						.instrument(tracing::debug_span!("link (all)"))
+						.await?;
+				}
+
+				Ok(graph)
+			})
+			.map(|r| r.unwrap()),
+		))
+	}
+}
+
+/// Errors that can occur when downloading and linking dependencies
+pub mod errors {
+	use thiserror::Error;
+
+	/// An error that can occur when downloading and linking dependencies
+	#[derive(Debug, Error)]
+	#[non_exhaustive]
+	pub enum DownloadAndLinkError<E> {
+		/// An error occurred while downloading the graph
+		#[error("error downloading graph")]
+		DownloadGraph(#[from] crate::download::errors::DownloadGraphError),
+
+		/// An error occurred while linking dependencies
+		#[error("error linking dependencies")]
+		Linking(#[from] crate::linking::errors::LinkingError),
+
+		/// An error occurred while executing the pesde callback
+		#[error("error executing pesde callback")]
+		PesdeCallback(#[source] E),
+	}
+}
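A sketch of how a caller is expected to consume the new helper; `project`, `graph`, `refreshed_sources`, and `reqwest` are assumptions rather than part of the diff, and the pesde callback here is a no-op.

// The returned receiver reports progress while the spawned task runs the
// pesde pass, the wally pass, and both linking steps in the background.
let (mut rx, downloaded_fut) = project
    .download_and_link(
        &graph,             // Arc<DependencyGraph>
        &refreshed_sources, // Arc<tokio::sync::Mutex<HashSet<PackageSources>>>
        &reqwest,
        false, // prod
        true,  // write
        |_pesde_graph| async { Ok::<_, std::io::Error>(()) },
    )
    .await?;

while let Some(result) = rx.recv().await {
    tracing::info!("{}", result?);
}

// Resolves once all four steps have finished.
let downloaded_graph = downloaded_fut.await?;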
51  src/lib.rs

@@ -14,12 +14,16 @@ use futures::{future::try_join_all, Stream};
 use gix::sec::identity::Account;
 use std::{
 	collections::{HashMap, HashSet},
+	fmt::Debug,
 	path::{Path, PathBuf},
 };
+use tracing::instrument;
 use wax::Pattern;

 /// Downloading packages
 pub mod download;
+/// Utility for downloading and linking in the correct order
+pub mod download_and_link;
 /// Linking packages
 pub mod linking;
 /// Lockfile
@@ -48,6 +52,8 @@ pub const DEFAULT_INDEX_NAME: &str = "default";
 /// The name of the packages container
 pub const PACKAGES_CONTAINER_NAME: &str = ".pesde";
 pub(crate) const LINK_LIB_NO_FILE_FOUND: &str = "____pesde_no_export_file_found";
+/// The folder in which scripts are linked
+pub const SCRIPTS_LINK_FOLDER: &str = ".pesde";

 /// Struct containing the authentication configuration
 #[derive(Debug, Default, Clone)]
@@ -145,29 +151,35 @@ impl Project {
 	}

 	/// Read the manifest file
+	#[instrument(skip(self), ret(level = "trace"), level = "debug")]
 	pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
 		let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
 		Ok(string)
 	}

+	// TODO: cache the manifest
 	/// Deserialize the manifest file
+	#[instrument(skip(self), ret(level = "trace"), level = "debug")]
 	pub async fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
 		let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
 		Ok(toml::from_str(&string)?)
 	}

 	/// Write the manifest file
+	#[instrument(skip(self, manifest), level = "debug")]
 	pub async fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
 		fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref()).await
 	}

 	/// Deserialize the lockfile
+	#[instrument(skip(self), ret(level = "trace"), level = "debug")]
 	pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
 		let string = fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME)).await?;
 		Ok(toml::from_str(&string)?)
 	}

 	/// Write the lockfile
+	#[instrument(skip(self, lockfile), level = "debug")]
 	pub async fn write_lockfile(
 		&self,
 		lockfile: Lockfile,
@@ -178,9 +190,11 @@ impl Project {
 	}

 	/// Get the workspace members
-	pub async fn workspace_members<P: AsRef<Path>>(
+	#[instrument(skip(self), level = "debug")]
+	pub async fn workspace_members<P: AsRef<Path> + Debug>(
 		&self,
 		dir: P,
+		can_ref_self: bool,
 	) -> Result<
 		impl Stream<Item = Result<(PathBuf, Manifest), errors::WorkspaceMembersError>>,
 		errors::WorkspaceMembersError,
@@ -197,6 +211,7 @@ impl Project {
 			dir,
 			manifest.workspace_members.iter().map(|s| s.as_str()),
 			false,
+			can_ref_self,
 		)
 		.await?;

@@ -216,7 +231,16 @@ impl Project {
 }

 /// Gets all matching paths in a directory
-pub async fn matching_globs_old_behaviour<'a, P: AsRef<Path>, I: IntoIterator<Item = &'a str>>(
+#[deprecated(
+	since = "0.5.0-rc.13",
+	note = "use `matching_globs` instead, which does not have the old behaviour of including whole directories by their name (`src` instead of `src/**`)"
+)]
+#[instrument(ret, level = "trace")]
+pub async fn matching_globs_old_behaviour<
+	'a,
+	P: AsRef<Path> + Debug,
+	I: IntoIterator<Item = &'a str> + Debug,
+>(
 	dir: P,
 	globs: I,
 	relative: bool,
@@ -255,17 +279,16 @@ pub async fn matching_globs_old_behaviour<'a, P: AsRef<Path>, I: IntoIterator<It
 			let path = entry.path();
 			let relative_path = path.strip_prefix(dir.as_ref()).unwrap();
 			let file_name = path.file_name().unwrap();
-			let mut is_filename_match = false;
+			let is_filename_match =
+				is_root && file_name.to_str().is_some_and(|s| file_names.contains(s));

 			if entry.file_type().await?.is_dir() {
-				is_filename_match =
-					is_root && file_name.to_str().is_some_and(|s| file_names.contains(s));
 				read_dirs.push((
 					fs::read_dir(&path).await?,
 					is_entire_dir_included || is_filename_match,
 				));
 				if is_filename_match {
-					log::warn!("directory name usage found for {}. this is deprecated and will be removed in the future", path.display());
+					tracing::warn!("directory name usage found for {}. this is deprecated and will be removed in the future", path.display());
 				}
 			}

@@ -288,14 +311,18 @@ pub async fn matching_globs_old_behaviour<'a, P: AsRef<Path>, I: IntoIterator<It
 }

 /// Gets all matching paths in a directory
-pub async fn matching_globs<'a, P: AsRef<Path>, I: IntoIterator<Item = &'a str>>(
+#[instrument(ret, level = "trace")]
+pub async fn matching_globs<'a, P: AsRef<Path> + Debug, I: IntoIterator<Item = &'a str> + Debug>(
 	dir: P,
 	globs: I,
 	relative: bool,
+	can_ref_self: bool,
 ) -> Result<HashSet<PathBuf>, errors::MatchingGlobsError> {
-	let (negative_globs, positive_globs): (Vec<&str>, Vec<&str>) =
+	let (negative_globs, mut positive_globs): (HashSet<&str>, _) =
 		globs.into_iter().partition(|glob| glob.starts_with('!'));

+	let include_self = positive_globs.remove(".") && can_ref_self;
+
 	let negative_globs = wax::any(
 		negative_globs
 			.into_iter()
@@ -312,6 +339,14 @@ pub async fn matching_globs<'a, P: AsRef<Path>, I: IntoIterator<Item = &'a str>>
 	let mut read_dirs = vec![fs::read_dir(dir.as_ref().to_path_buf()).await?];
 	let mut paths = HashSet::new();

+	if include_self {
+		paths.insert(if relative {
+			PathBuf::new()
+		} else {
+			dir.as_ref().to_path_buf()
+		});
+	}
+
 	while let Some(mut read_dir) = read_dirs.pop() {
 		while let Some(entry) = read_dir.next_entry().await? {
 			let path = entry.path();
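The `can_ref_self` plumbing above lets a "." entry in the glob list stand for the directory itself. A small usage sketch, with illustrative paths that are not taken from the diff:

// `.` includes the directory itself; `!`-prefixed globs are negative.
let paths = matching_globs(
    project.package_dir(),
    [".", "src/**", "!src/generated/**"],
    true, // relative: yield paths relative to the directory
    true, // can_ref_self: honour the `.` entry
)
.await?;

// With `relative = true`, the `.` match shows up as the empty path.
assert!(paths.contains(&std::path::PathBuf::new()));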
@@ -69,10 +69,29 @@ pub fn generate_lib_linking_module<I: IntoIterator<Item = S>, S: AsRef<str>>(
 fn luau_style_path(path: &Path) -> String {
 	let path = path
 		.components()
-		.filter_map(|ct| match ct {
+		.zip(
+			path.components()
+				.skip(1)
+				.map(Some)
+				.chain(std::iter::repeat(None)),
+		)
+		.filter_map(|(ct, next_ct)| match ct {
 			Component::CurDir => Some(".".to_string()),
 			Component::ParentDir => Some("..".to_string()),
-			Component::Normal(part) => Some(format!("{}", part.to_string_lossy())),
+			Component::Normal(part) => {
+				let str = part.to_string_lossy();
+
+				Some(
+					(if next_ct.is_some() {
+						&str
+					} else {
+						str.strip_suffix(".luau")
+							.or_else(|| str.strip_suffix(".lua"))
+							.unwrap_or(&str)
+					})
+					.to_string(),
+				)
+			}
 			_ => None,
 		})
 		.collect::<Vec<_>>()
@@ -98,10 +117,10 @@ pub fn get_lib_require_path(
 ) -> Result<String, errors::GetLibRequirePath> {
 	let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
 	let path = if use_new_structure {
-		log::debug!("using new structure for require path with {:?}", lib_file);
+		tracing::debug!("using new structure for require path with {lib_file:?}");
 		lib_file.to_path(path)
 	} else {
-		log::debug!("using old structure for require path with {:?}", lib_file);
+		tracing::debug!("using old structure for require path with {lib_file:?}");
 		path
 	};

@@ -126,14 +145,26 @@ pub fn get_lib_require_path(

 	let path = path
 		.components()
-		.filter_map(|component| match component {
+		.zip(
+			path.components()
+				.skip(1)
+				.map(Some)
+				.chain(std::iter::repeat(None)),
+		)
+		.filter_map(|(component, next_comp)| match component {
 			Component::ParentDir => Some(".Parent".to_string()),
 			Component::Normal(part) if part != "init.lua" && part != "init.luau" => {
+				let str = part.to_string_lossy();
+
 				Some(format!(
 					"[{:?}]",
-					part.to_string_lossy()
-						.trim_end_matches(".lua")
-						.trim_end_matches(".luau")
+					if next_comp.is_some() {
+						&str
+					} else {
+						str.strip_suffix(".luau")
+							.or_else(|| str.strip_suffix(".lua"))
+							.unwrap_or(&str)
+					}
 				))
 			}
 			_ => None,
@@ -168,12 +199,30 @@ pub fn get_bin_require_path(
 	luau_style_path(&path)
 }

+/// Generate a linking module for a script
+pub fn generate_script_linking_module(require_path: &str) -> String {
+	format!(r#"return require({require_path})"#)
+}
+
+/// Get the require path for a script
+pub fn get_script_require_path(
+	base_dir: &Path,
+	script_file: &RelativePathBuf,
+	destination_dir: &Path,
+) -> String {
+	let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
+	let path = script_file.to_path(path);
+
+	luau_style_path(&path)
+}
+
 /// Errors for the linking module utilities
 pub mod errors {
 	use thiserror::Error;

 	/// An error occurred while getting the require path for a library
 	#[derive(Debug, Error)]
+	#[non_exhaustive]
 	pub enum GetLibRequirePath {
 		/// The path for the RobloxPlaceKind could not be found
 		#[error("could not find the path for the RobloxPlaceKind {0}")]
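The rewritten path handling pairs every component with an Option of its successor, so only the final component has its `.luau`/`.lua` suffix stripped; a directory that merely ends in `.luau` now keeps its name. A standalone illustration of the pairing trick, not taken from the diff:

use std::path::Path;

// Zip the components with a shifted copy of themselves: `next` is `None`
// exactly once, on the last component.
fn components_with_next(path: &Path) -> Vec<(String, bool)> {
    path.components()
        .zip(
            path.components()
                .skip(1)
                .map(Some)
                .chain(std::iter::repeat(None)),
        )
        .map(|(ct, next)| {
            (
                ct.as_os_str().to_string_lossy().into_owned(),
                next.is_none(), // true only for the final component
            )
        })
        .collect()
}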
@@ -1,12 +1,15 @@
 use crate::{
 	linking::generator::get_file_types,
-	lockfile::DownloadedGraph,
+	lockfile::{DownloadedDependencyGraphNode, DownloadedGraph},
+	manifest::Manifest,
+	names::PackageNames,
 	scripts::{execute_script, ScriptName},
 	source::{
 		fs::{cas_path, store_in_cas},
 		traits::PackageRef,
+		version_id::VersionId,
 	},
-	Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME,
+	Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
 };
 use fs_err::tokio as fs;
 use futures::future::try_join_all;
@@ -17,6 +20,7 @@ use std::{
 	sync::Arc,
 };
 use tokio::task::spawn_blocking;
+use tracing::{instrument, Instrument};

 /// Generates linking modules for a project
 pub mod generator;
@@ -30,117 +34,237 @@ async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<Pat
 async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
 	let hash = store_in_cas(cas_dir, contents.as_bytes(), |_| async { Ok(()) }).await?;

+	match fs::remove_file(&destination).await {
+		Ok(_) => {}
+		Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
+		Err(e) => return Err(e),
+	};
+
 	fs::hard_link(cas_path(&hash, cas_dir), destination).await
 }

 impl Project {
 	/// Links the dependencies of the project
+	#[instrument(skip(self, graph), level = "debug")]
 	pub async fn link_dependencies(
 		&self,
 		graph: &DownloadedGraph,
+		with_types: bool,
 	) -> Result<(), errors::LinkingError> {
 		let manifest = self.deser_manifest().await?;
 		let manifest_target_kind = manifest.target.kind();
+		let manifest = Arc::new(manifest);
+
+		// step 1. link all non-wally packages (and their dependencies) temporarily without types
+		// we do this separately to allow the required tools for the scripts to be installed
+		self.link(graph, &manifest, &Arc::new(Default::default()), false)
+			.await?;
+
+		if !with_types {
+			return Ok(());
+		}
+
+		// step 2. extract the types from libraries, prepare Roblox packages for syncing
 		let roblox_sync_config_gen_script = manifest
 			.scripts
 			.get(&ScriptName::RobloxSyncConfigGenerator.to_string());

-		let package_types = try_join_all(
-			graph
-				.iter()
-				.map(|(name, versions)| async move {
-					Ok::<_, errors::LinkingError>((name, try_join_all(versions.iter().map(|(version_id, node)| async move {
-						let Some(lib_file) = node.target.lib_path() else {
-							return Ok((version_id, vec![]));
-						};
+		let package_types = try_join_all(graph.iter().map(|(name, versions)| async move {
+			Ok::<_, errors::LinkingError>((
+				name,
+				try_join_all(versions.iter().map(|(version_id, node)| async move {
+					let Some(lib_file) = node.target.lib_path() else {
+						return Ok((version_id, vec![]));
+					};

 					let container_folder = node.node.container_folder(
 						&self
 							.package_dir()
 							.join(manifest_target_kind.packages_folder(version_id.target()))
 							.join(PACKAGES_CONTAINER_NAME),
 						name,
 						version_id.version(),
 					);

 					let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
 						let lib_file = lib_file.to_path(&container_folder);

 						let contents = match fs::read_to_string(&lib_file).await {
 							Ok(contents) => contents,
 							Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
 								return Err(errors::LinkingError::LibFileNotFound(
 									lib_file.display().to_string(),
 								));
 							}
 							Err(e) => return Err(e.into()),
 						};

-						let types = match spawn_blocking(move || get_file_types(&contents)).await.unwrap() {
+						let types = match spawn_blocking(move || get_file_types(&contents))
+							.await
+							.unwrap()
+						{
 							Ok(types) => types,
 							Err(e) => {
 								return Err(errors::LinkingError::FullMoon(
 									lib_file.display().to_string(),
 									e,
 								))
 							}
 						};

-						log::debug!("{name}@{version_id} has {} exported types", types.len());
+						tracing::debug!("contains {} exported types", types.len());

 						types
 					} else {
 						vec![]
 					};

 					if let Some(build_files) = Some(&node.target)
 						.filter(|_| !node.node.pkg_ref.like_wally())
 						.and_then(|t| t.build_files())
 					{
 						let Some(script_path) = roblox_sync_config_gen_script else {
-							log::warn!("not having a `{}` script in the manifest might cause issues with Roblox linking", ScriptName::RobloxSyncConfigGenerator);
-							return Ok((version_id, vec![]));
+							tracing::warn!("not having a `{}` script in the manifest might cause issues with Roblox linking", ScriptName::RobloxSyncConfigGenerator);
+							return Ok((version_id, types));
 						};

 						execute_script(
 							ScriptName::RobloxSyncConfigGenerator,
 							&script_path.to_path(self.package_dir()),
 							std::iter::once(container_folder.as_os_str())
 								.chain(build_files.iter().map(OsStr::new)),
 							self,
 							false,
-						)
+						).await
 						.map_err(|e| {
 							errors::LinkingError::GenerateRobloxSyncConfig(
 								container_folder.display().to_string(),
 								e,
 							)
 						})?;
 					}

 					Ok((version_id, types))
-				})).await?.into_iter().collect::<HashMap<_, _>>()))
-			})
-		)
+				}.instrument(tracing::debug_span!("extract types", name = name.to_string(), version_id = version_id.to_string()))))
+				.await?
+				.into_iter()
+				.collect::<HashMap<_, _>>(),
+			))
+		}))
 		.await?
 		.into_iter()
 		.collect::<HashMap<_, _>>();

-		let manifest = Arc::new(manifest);
-		let package_types = Arc::new(package_types);
+		// step 3. link all packages (and their dependencies), this time with types
+		self.link(graph, &manifest, &Arc::new(package_types), true)
+			.await
+	}
+
+	#[allow(clippy::too_many_arguments)]
+	async fn link_files(
+		&self,
+		base_folder: &Path,
+		container_folder: &Path,
+		root_container_folder: &Path,
+		relative_container_folder: &Path,
+		node: &DownloadedDependencyGraphNode,
+		name: &PackageNames,
+		version_id: &VersionId,
+		alias: &str,
+		package_types: &HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>,
+		manifest: &Manifest,
+	) -> Result<(), errors::LinkingError> {
+		static NO_TYPES: Vec<String> = Vec::new();
+
+		if let Some(lib_file) = node.target.lib_path() {
+			let lib_module = generator::generate_lib_linking_module(
+				&generator::get_lib_require_path(
+					&node.target.kind(),
+					base_folder,
+					lib_file,
+					container_folder,
+					node.node.pkg_ref.use_new_structure(),
+					root_container_folder,
+					relative_container_folder,
+					manifest,
+				)?,
+				package_types
+					.get(name)
+					.and_then(|v| v.get(version_id))
+					.unwrap_or(&NO_TYPES),
+			);
+
+			write_cas(
+				base_folder.join(format!("{alias}.luau")),
+				self.cas_dir(),
+				&lib_module,
+			)
+			.await?;
+		}
+
+		if let Some(bin_file) = node.target.bin_path() {
+			let bin_module = generator::generate_bin_linking_module(
+				container_folder,
+				&generator::get_bin_require_path(base_folder, bin_file, container_folder),
+			);
+
+			write_cas(
+				base_folder.join(format!("{alias}.bin.luau")),
+				self.cas_dir(),
+				&bin_module,
+			)
+			.await?;
+		}
+
+		if let Some(scripts) = node.target.scripts().filter(|s| !s.is_empty()) {
+			let scripts_base =
+				create_and_canonicalize(self.package_dir().join(SCRIPTS_LINK_FOLDER).join(alias))
+					.await?;
+
+			for (script_name, script_path) in scripts {
+				let script_module =
+					generator::generate_script_linking_module(&generator::get_script_require_path(
+						&scripts_base,
+						script_path,
+						container_folder,
+					));
+
+				write_cas(
+					scripts_base.join(format!("{script_name}.luau")),
+					self.cas_dir(),
+					&script_module,
+				)
+				.await?;
+			}
+		}
+
+		Ok(())
+	}
+
+	async fn link(
+		&self,
+		graph: &DownloadedGraph,
+		manifest: &Arc<Manifest>,
+		package_types: &Arc<HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>>,
+		is_complete: bool,
+	) -> Result<(), errors::LinkingError> {
 		try_join_all(graph.iter().flat_map(|(name, versions)| {
 			versions.iter().map(|(version_id, node)| {
 				let name = name.clone();
 				let manifest = manifest.clone();
 				let package_types = package_types.clone();

+				let span = tracing::info_span!(
+					"link",
+					name = name.to_string(),
+					version_id = version_id.to_string()
+				);
+
 				async move {
 					let (node_container_folder, node_packages_folder) = {
 						let base_folder = create_and_canonicalize(
 							self.package_dir()
-								.join(manifest_target_kind.packages_folder(version_id.target())),
+								.join(manifest.target.kind().packages_folder(version_id.target())),
 						)
 						.await?;
 						let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
@@ -151,50 +275,20 @@ impl Project {
 							version_id.version(),
 						);

-						if let Some((alias, _, _)) = &node.node.direct.as_ref() {
-							if let Some((lib_file, types)) =
-								node.target.lib_path().and_then(|lib_file| {
-									package_types
-										.get(&name)
-										.and_then(|v| v.get(version_id))
-										.map(|types| (lib_file, types))
-								})
-							{
-								write_cas(
-									base_folder.join(format!("{alias}.luau")),
-									self.cas_dir(),
-									&generator::generate_lib_linking_module(
-										&generator::get_lib_require_path(
-											&node.target.kind(),
-											&base_folder,
-											lib_file,
-											&container_folder,
-											node.node.pkg_ref.use_new_structure(),
-											&base_folder,
-											container_folder.strip_prefix(&base_folder).unwrap(),
-											&manifest,
-										)?,
-										types,
-									),
-								)
-								.await?;
-							};
-
-							if let Some(bin_file) = node.target.bin_path() {
-								write_cas(
-									base_folder.join(format!("{alias}.bin.luau")),
-									self.cas_dir(),
-									&generator::generate_bin_linking_module(
-										&container_folder,
-										&generator::get_bin_require_path(
-											&base_folder,
-											bin_file,
-											&container_folder,
-										),
-									),
-								)
-								.await?;
-							}
+						if let Some((alias, _, _)) = &node.node.direct {
+							self.link_files(
+								&base_folder,
+								&container_folder,
+								&base_folder,
+								container_folder.strip_prefix(&base_folder).unwrap(),
+								node,
+								&name,
+								version_id,
+								alias,
+								&package_types,
+								&manifest,
+							)
+							.await?;
 						}

 						(container_folder, base_folder)
@@ -207,13 +301,13 @@ impl Project {
 							.get(dependency_name)
 							.and_then(|v| v.get(dependency_version_id))
 						else {
-							return Err(errors::LinkingError::DependencyNotFound(
-								dependency_name.to_string(),
-								dependency_version_id.to_string(),
-							));
-						};
-
-						let Some(lib_file) = dependency_node.target.lib_path() else {
+							if is_complete {
+								return Err(errors::LinkingError::DependencyNotFound(
+									format!("{dependency_name}@{dependency_version_id}"),
+									format!("{name}@{version_id}"),
+								));
+							}
+
 							continue;
 						};

@@ -241,31 +335,24 @@ impl Project {
 						)
 						.await?;

-						write_cas(
-							linker_folder.join(format!("{dependency_alias}.luau")),
-							self.cas_dir(),
-							&generator::generate_lib_linking_module(
-								&generator::get_lib_require_path(
-									&dependency_node.target.kind(),
-									&linker_folder,
-									lib_file,
-									&container_folder,
-									dependency_node.node.pkg_ref.use_new_structure(),
-									&node_packages_folder,
-									container_folder.strip_prefix(&base_folder).unwrap(),
-									&manifest,
-								)?,
-								package_types
-									.get(dependency_name)
-									.and_then(|v| v.get(dependency_version_id))
-									.unwrap(),
-							),
-						)
-						.await?;
+						self.link_files(
+							&linker_folder,
+							&container_folder,
+							&node_packages_folder,
+							container_folder.strip_prefix(&base_folder).unwrap(),
+							dependency_node,
+							dependency_name,
+							dependency_version_id,
+							dependency_alias,
+							&package_types,
+							&manifest,
+						)
+						.await?;
 					}

 					Ok(())
 				}
+				.instrument(span)
 			})
 		}))
 		.await
@@ -290,7 +377,7 @@ pub mod errors {
 		Io(#[from] std::io::Error),

 		/// A dependency was not found
-		#[error("dependency not found: {0}@{1}")]
+		#[error("dependency `{0}` of `{1}` not found")]
 		DependencyNotFound(String, String),

 		/// The library file was not found
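A sketch of the resulting two-pass flow from the caller's side; the `graph` value is assumed to be a `DownloadedGraph` already in scope:

// Pass 1: link without types so the tooling required by scripts can be
// installed before any type extraction runs.
project.link_dependencies(&graph, false).await?;

// Pass 2: re-link with exported types included in the generated modules.
project.link_dependencies(&graph, true).await?;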
@@ -14,7 +14,7 @@ use relative_path::RelativePathBuf;
 use semver::Version;
 use serde::{Deserialize, Serialize};
 use std::{
-	collections::{btree_map::Entry, BTreeMap},
+	collections::BTreeMap,
 	path::{Path, PathBuf},
 };

@@ -32,6 +32,9 @@ pub struct DependencyGraphNode {
 	pub dependencies: BTreeMap<PackageNames, (VersionId, String)>,
 	/// The resolved (transformed, for example Peer -> Standard) type of the dependency
 	pub resolved_ty: DependencyType,
+	/// Whether the resolved type should be Peer if this isn't depended on
+	#[serde(default, skip_serializing_if = "std::ops::Not::not")]
+	pub is_peer: bool,
 	/// The package reference
 	pub pkg_ref: PackageRefs,
 }
@@ -52,6 +55,18 @@ impl DependencyGraphNode {
 		name: &PackageNames,
 		version: &Version,
 	) -> PathBuf {
+		if self.pkg_ref.like_wally() {
+			return path
+				.as_ref()
+				.join(format!(
+					"{}_{}@{}",
+					name.as_str().0,
+					name.as_str().1,
+					version
+				))
+				.join(name.as_str().1);
+		}
+
 		path.as_ref()
 			.join(name.escaped())
 			.join(version.to_string())
@@ -62,45 +77,6 @@ impl DependencyGraphNode {
 /// A graph of `DependencyGraphNode`s
 pub type DependencyGraph = Graph<DependencyGraphNode>;

-pub(crate) fn insert_node(
-	graph: &mut DependencyGraph,
-	name: PackageNames,
-	version: VersionId,
-	mut node: DependencyGraphNode,
-	is_top_level: bool,
-) {
-	if !is_top_level && node.direct.take().is_some() {
-		log::debug!(
-			"tried to insert {name}@{version} as direct dependency from a non top-level context",
-		);
-	}
-
-	match graph
-		.entry(name.clone())
-		.or_default()
-		.entry(version.clone())
-	{
-		Entry::Vacant(entry) => {
-			entry.insert(node);
-		}
-		Entry::Occupied(existing) => {
-			let current_node = existing.into_mut();
-
-			match (&current_node.direct, &node.direct) {
-				(Some(_), Some(_)) => {
-					log::warn!("duplicate direct dependency for {name}@{version}");
-				}
-
-				(None, Some(_)) => {
-					current_node.direct = node.direct;
-				}
-
-				(_, _) => {}
-			}
-		}
-	}
-}
-
 /// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct DownloadedDependencyGraphNode {
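The wally branch added to `container_folder` uses a different on-disk layout. A restatement with plain strings instead of the crate's `PackageNames` and `Version` types (those substitutions are assumptions, not the real signature):

use std::path::{Path, PathBuf};

// Wally packages land in `<base>/<scope>_<name>@<version>/<name>`,
// while pesde packages keep `<base>/<escaped name>/<version>`.
fn wally_container_folder(base: &Path, scope: &str, name: &str, version: &str) -> PathBuf {
    base.join(format!("{scope}_{name}@{version}")).join(name)
}

// e.g. wally_container_folder(Path::new(".pesde"), "scope", "pkg", "1.2.3")
//   -> .pesde/scope_pkg@1.2.3/pkg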
89  src/main.rs

@@ -1,27 +1,37 @@
 #[cfg(feature = "version-management")]
-use crate::cli::version::{check_for_updates, get_or_download_version};
+use crate::cli::version::{check_for_updates, get_or_download_version, TagInfo};
 use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
 use anyhow::Context;
-use clap::Parser;
+use clap::{builder::styling::AnsiColor, Parser};
 use fs_err::tokio as fs;
-use indicatif::MultiProgress;
-use indicatif_log_bridge::LogWrapper;
 use pesde::{matching_globs, AuthConfig, Project, MANIFEST_FILE_NAME};
 use std::{
 	collections::HashSet,
 	path::{Path, PathBuf},
 };
 use tempfile::NamedTempFile;
+use tracing::instrument;
+use tracing_indicatif::{filter::IndicatifFilter, IndicatifLayer};
+use tracing_subscriber::{
+	filter::LevelFilter, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer,
+};

 mod cli;
 pub mod util;

+const STYLES: clap::builder::Styles = clap::builder::Styles::styled()
+	.header(AnsiColor::Yellow.on_default().underline())
+	.usage(AnsiColor::Yellow.on_default().underline())
+	.literal(AnsiColor::Green.on_default().bold())
+	.placeholder(AnsiColor::Cyan.on_default());
+
 #[derive(Parser, Debug)]
 #[clap(
 	version,
-	about = "A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune"
+	about = "A package manager for the Luau programming language",
+	long_about = "A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune"
 )]
-#[command(disable_version_flag = true)]
+#[command(disable_version_flag = true, styles = STYLES)]
 struct Cli {
 	/// Print version
 	#[arg(short = 'v', short_alias = 'V', long, action = clap::builder::ArgAction::Version)]
@@ -31,6 +41,7 @@ struct Cli {
 	subcommand: cli::commands::Subcommand,
 }

+#[instrument(level = "trace")]
 async fn get_linkable_dir(path: &Path) -> PathBuf {
 	let mut curr_path = PathBuf::new();
 	let file_to_try = NamedTempFile::new_in(path).expect("failed to create temporary file");
@@ -61,7 +72,7 @@ async fn get_linkable_dir(path: &Path) -> PathBuf {

 	if fs::hard_link(file_to_try.path(), &try_path).await.is_ok() {
 		if let Err(err) = fs::remove_file(&try_path).await {
-			log::warn!(
+			tracing::warn!(
 				"failed to remove temporary file at {}: {err}",
 				try_path.display()
 			);
@@ -107,7 +118,12 @@ async fn run() -> anyhow::Result<()> {
 	// on unix systems
 	let status = std::process::Command::new("lune")
 		.arg("run")
-		.arg(exe.with_extension(""))
+		.arg(
+			exe.parent()
+				.map(|p| p.join(".impl").join(exe.file_name().unwrap()))
+				.unwrap_or(exe)
+				.with_extension("luau"),
+		)
 		.arg("--")
 		.args(std::env::args_os().skip(1))
 		.current_dir(cwd)
@@ -117,6 +133,39 @@ async fn run() -> anyhow::Result<()> {
 		std::process::exit(status.code().unwrap());
 	}

+	let indicatif_layer = IndicatifLayer::new().with_filter(IndicatifFilter::new(false));
+
+	let tracing_env_filter = EnvFilter::builder()
+		.with_default_directive(LevelFilter::INFO.into())
+		.from_env_lossy()
+		.add_directive("reqwest=info".parse().unwrap())
+		.add_directive("rustls=info".parse().unwrap())
+		.add_directive("tokio_util=info".parse().unwrap())
+		.add_directive("goblin=info".parse().unwrap())
+		.add_directive("tower=info".parse().unwrap())
+		.add_directive("hyper=info".parse().unwrap())
+		.add_directive("h2=info".parse().unwrap());
+
+	let fmt_layer =
+		tracing_subscriber::fmt::layer().with_writer(indicatif_layer.inner().get_stderr_writer());
+
+	#[cfg(debug_assertions)]
+	let fmt_layer = fmt_layer.with_timer(tracing_subscriber::fmt::time::uptime());
+
+	#[cfg(not(debug_assertions))]
+	let fmt_layer = fmt_layer
+		.pretty()
+		.with_timer(())
+		.with_line_number(false)
+		.with_file(false)
+		.with_target(false);
+
+	tracing_subscriber::registry()
+		.with(tracing_env_filter)
+		.with(fmt_layer)
+		.with(indicatif_layer)
+		.init();
+
 	let (project_root_dir, project_workspace_dir) = 'finder: {
 		let mut current_path = Some(cwd.clone());
 		let mut project_root = None::<PathBuf>;
@@ -137,6 +186,7 @@ async fn run() -> anyhow::Result<()> {
 			path,
 			manifest.workspace_members.iter().map(|s| s.as_str()),
 			false,
+			false,
 		)
 		.await
 		.context("failed to get workspace members")
@@ -178,16 +228,13 @@ async fn run() -> anyhow::Result<()> {
 		(project_root.unwrap_or_else(|| cwd.clone()), workspace_dir)
 	};

-	let multi = {
-		let logger = pretty_env_logger::formatted_builder()
-			.parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"))
-			.build();
-		let multi = MultiProgress::new();
-
-		LogWrapper::new(multi.clone(), logger).try_init().unwrap();
-
-		multi
-	};
+	tracing::trace!(
+		"project root: {}\nworkspace root: {}",
+		project_root_dir.display(),
+		project_workspace_dir
+			.as_ref()
+			.map_or("none".to_string(), |p| p.display().to_string())
+	);

 	let home_dir = home_dir()?;
 	let data_dir = home_dir.join("data");
@@ -204,7 +251,7 @@ async fn run() -> anyhow::Result<()> {
 	}
 	.join("cas");

-	log::debug!("using cas dir in {}", cas_dir.display());
+	tracing::debug!("using cas dir in {}", cas_dir.display());

 	let project = Project::new(
 		project_root_dir,
@@ -243,7 +290,7 @@ async fn run() -> anyhow::Result<()> {
 		.and_then(|manifest| manifest.pesde_version);

 	let exe_path = if let Some(version) = target_version {
-		get_or_download_version(&reqwest, &version, false).await?
+		get_or_download_version(&reqwest, &TagInfo::Incomplete(version), false).await?
 	} else {
 		None
 	};
@@ -265,7 +312,7 @@ async fn run() -> anyhow::Result<()> {

 	let cli = Cli::parse();

-	cli.subcommand.run(project, multi, reqwest).await
+	cli.subcommand.run(project, reqwest).await
 }

 #[tokio::main]
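A stripped-down, standalone version of the subscriber stack set up above, assuming the `tracing-subscriber` and `tracing-indicatif` crates; it omits the per-crate directives and the debug/release `fmt` variants:

use tracing_indicatif::IndicatifLayer;
use tracing_subscriber::{
    filter::LevelFilter, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter,
};

fn init_tracing() {
    let indicatif_layer = IndicatifLayer::new();

    // RUST_LOG-style filtering, defaulting to `info`.
    let env_filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::INFO.into())
        .from_env_lossy();

    // Route formatted logs through indicatif so log lines do not tear
    // progress bars apart.
    let fmt_layer = tracing_subscriber::fmt::layer()
        .with_writer(indicatif_layer.get_stderr_writer());

    tracing_subscriber::registry()
        .with(env_filter)
        .with(fmt_layer)
        .with(indicatif_layer)
        .init();
}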
@@ -1,13 +1,13 @@
-use relative_path::RelativePathBuf;
-use semver::Version;
-use serde::{Deserialize, Serialize};
-use std::collections::{BTreeMap, HashMap};
-
 use crate::{
 	manifest::{overrides::OverrideKey, target::Target},
 	names::PackageName,
 	source::specifiers::DependencySpecifiers,
 };
+use relative_path::RelativePathBuf;
+use semver::Version;
+use serde::{Deserialize, Serialize};
+use std::collections::{BTreeMap, HashMap};
+use tracing::instrument;

 /// Overrides
 pub mod overrides;
@@ -107,6 +107,7 @@ pub enum DependencyType {

 impl Manifest {
 	/// Get all dependencies from the manifest
+	#[instrument(skip(self), ret(level = "trace"), level = "debug")]
 	pub fn all_dependencies(
 		&self,
 	) -> Result<

@@ -2,7 +2,7 @@ use relative_path::RelativePathBuf;
 use serde::{Deserialize, Serialize};
 use serde_with::{DeserializeFromStr, SerializeDisplay};
 use std::{
-    collections::BTreeSet,
+    collections::{BTreeMap, BTreeSet},
     fmt::{Display, Formatter},
     str::FromStr,
 };

@@ -68,6 +68,11 @@ impl TargetKind {

         format!("{dependency}_packages")
     }
+
+    /// Returns whether this target is a Roblox target
+    pub fn is_roblox(&self) -> bool {
+        matches!(self, TargetKind::Roblox | TargetKind::RobloxServer)
+    }
 }

 /// A target of a package

@@ -77,7 +82,7 @@ pub enum Target {
     /// A Roblox target
     Roblox {
         /// The path to the lib export file
-        #[serde(default)]
+        #[serde(default, skip_serializing_if = "Option::is_none")]
         lib: Option<RelativePathBuf>,
         /// The files to include in the sync tool's config
         #[serde(default)]

@@ -86,7 +91,7 @@ pub enum Target {
     /// A Roblox server target
     RobloxServer {
         /// The path to the lib export file
-        #[serde(default)]
+        #[serde(default, skip_serializing_if = "Option::is_none")]
         lib: Option<RelativePathBuf>,
         /// The files to include in the sync tool's config
         #[serde(default)]

@@ -95,20 +100,26 @@ pub enum Target {
     /// A Lune target
     Lune {
         /// The path to the lib export file
-        #[serde(default)]
+        #[serde(default, skip_serializing_if = "Option::is_none")]
         lib: Option<RelativePathBuf>,
         /// The path to the bin export file
-        #[serde(default)]
+        #[serde(default, skip_serializing_if = "Option::is_none")]
         bin: Option<RelativePathBuf>,
+        /// The exported scripts
+        #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
+        scripts: BTreeMap<String, RelativePathBuf>,
     },
     /// A Luau target
     Luau {
         /// The path to the lib export file
-        #[serde(default)]
+        #[serde(default, skip_serializing_if = "Option::is_none")]
         lib: Option<RelativePathBuf>,
         /// The path to the bin export file
-        #[serde(default)]
+        #[serde(default, skip_serializing_if = "Option::is_none")]
         bin: Option<RelativePathBuf>,
+        /// The exported scripts
+        #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
+        scripts: BTreeMap<String, RelativePathBuf>,
     },
 }

@@ -151,6 +162,15 @@ impl Target {
             _ => None,
         }
     }
+
+    /// Returns the scripts exported by this target
+    pub fn scripts(&self) -> Option<&BTreeMap<String, RelativePathBuf>> {
+        match self {
+            Target::Lune { scripts, .. } => Some(scripts),
+            Target::Luau { scripts, .. } => Some(scripts),
+            _ => None,
+        }
+    }
 }

 impl Display for Target {
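
Note: the new scripts field is skipped during serialization when empty, so existing manifests round-trip unchanged, and the accessor added above is how callers reach it. A small usage sketch (`target` is assumed to come from an already-parsed manifest):

    // prints each exported script of a Lune/Luau target; other targets yield None
    if let Some(scripts) = target.scripts() {
        for (name, path) in scripts {
            println!("{name} -> {path}");
        }
    }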

 src/names.rs | 22

@@ -35,8 +35,16 @@ impl FromStr for PackageName {
             .ok_or(Self::Err::InvalidFormat(s.to_string()))?;

         for (reason, part) in [(ErrorReason::Scope, scope), (ErrorReason::Name, name)] {
-            if part.len() < 3 || part.len() > 32 {
-                return Err(Self::Err::InvalidLength(reason, part.to_string()));
+            let min_len = match reason {
+                ErrorReason::Scope => 3,
+                ErrorReason::Name => 1,
+            };
+
+            if !(min_len..=32).contains(&part.len()) {
+                return Err(match reason {
+                    ErrorReason::Scope => Self::Err::InvalidScopeLength(part.to_string()),
+                    ErrorReason::Name => Self::Err::InvalidNameLength(part.to_string()),
+                });
             }

             if part.chars().all(|c| c.is_ascii_digit()) {

@@ -231,9 +239,13 @@ pub mod errors {
         #[error("package {0} `{1}` starts or ends with an underscore")]
         PrePostfixUnderscore(ErrorReason, String),

-        /// The package name is not within 3-32 characters long
-        #[error("package {0} `{1}` is not within 3-32 characters long")]
-        InvalidLength(ErrorReason, String),
+        /// The package name's scope part is not within 3-32 characters long
+        #[error("package scope `{0}` is not within 3-32 characters long")]
+        InvalidScopeLength(String),
+
+        /// The package name's name part is not within 1-32 characters long
+        #[error("package name `{0}` is not within 1-32 characters long")]
+        InvalidNameLength(String),
     }

     /// Errors that can occur when working with Wally package names
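
Note: the single 3-32 bound is split so that only the scope keeps the minimum of 3; the name part now accepts a single character. A standalone restatement of the new rule (the helper name here is hypothetical):

    fn part_len_ok(part: &str, is_scope: bool) -> bool {
        let min_len = if is_scope { 3 } else { 1 };
        (min_len..=32).contains(&part.len())
    }

    fn main() {
        assert!(part_len_ok("pesde", true));
        assert!(part_len_ok("x", false)); // one-character names are now accepted
        assert!(!part_len_ok("ab", true)); // scopes still need at least 3 characters
    }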

@@ -3,6 +3,7 @@ use fs_err::tokio as fs;
 use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature};
 use relative_path::RelativePathBuf;
 use std::path::Path;
+use tracing::instrument;

 /// Set up a git repository for patches
 pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> {

@@ -69,6 +70,7 @@ pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {

 impl Project {
     /// Apply patches to the project's dependencies
+    #[instrument(skip(self, graph), level = "debug")]
     pub async fn apply_patches(
         &self,
         graph: &DownloadedGraph,

@@ -97,7 +99,7 @@ impl Project {
             .get(&name)
             .and_then(|versions| versions.get(&version_id))
         else {
-            log::warn!(
+            tracing::warn!(
                 "patch for {name}@{version_id} not applied because it is not in the graph"
             );
             tx.send(Ok(format!("{name}@{version_id}"))).await.unwrap();

@@ -114,7 +116,7 @@ impl Project {
             );

             tokio::spawn(async move {
-                log::debug!("applying patch to {name}@{version_id}");
+                tracing::debug!("applying patch to {name}@{version_id}");

                 let patch = match fs::read(&patch_path).await {
                     Ok(patch) => patch,

@@ -195,7 +197,9 @@ impl Project {
                     }
                 }

-                log::debug!("patch applied to {name}@{version_id}, removing .git directory");
+                tracing::debug!(
+                    "patch applied to {name}@{version_id}, removing .git directory"
+                );

                 if let Err(e) = fs::remove_dir_all(container_folder.join(".git")).await {
                     tx.send(Err(errors::ApplyPatchesError::DotGitRemove(e)))

 src/resolver.rs | 508

@@ -1,5 +1,5 @@
 use crate::{
-    lockfile::{insert_node, DependencyGraph, DependencyGraphNode},
+    lockfile::{DependencyGraph, DependencyGraphNode},
     manifest::DependencyType,
     names::PackageNames,
     source::{

@@ -11,10 +11,55 @@ use crate::{
     },
     Project, DEFAULT_INDEX_NAME,
 };
-use std::collections::{HashMap, HashSet, VecDeque};
+use std::collections::{btree_map::Entry, HashMap, HashSet, VecDeque};
+use tracing::{instrument, Instrument};
+
+fn insert_node(
+    graph: &mut DependencyGraph,
+    name: PackageNames,
+    version: VersionId,
+    mut node: DependencyGraphNode,
+    is_top_level: bool,
+) {
+    if !is_top_level && node.direct.take().is_some() {
+        tracing::debug!(
+            "tried to insert {name}@{version} as direct dependency from a non top-level context",
+        );
+    }
+
+    match graph
+        .entry(name.clone())
+        .or_default()
+        .entry(version.clone())
+    {
+        Entry::Vacant(entry) => {
+            entry.insert(node);
+        }
+        Entry::Occupied(existing) => {
+            let current_node = existing.into_mut();
+
+            match (&current_node.direct, &node.direct) {
+                (Some(_), Some(_)) => {
+                    tracing::warn!("duplicate direct dependency for {name}@{version}");
+                }
+
+                (None, Some(_)) => {
+                    current_node.direct = node.direct;
+                }
+
+                (_, _) => {}
+            }
+        }
+    }
+}

 impl Project {
     /// Create a dependency graph from the project's manifest
+    #[instrument(
+        skip(self, previous_graph, refreshed_sources),
+        ret(level = "trace"),
+        level = "debug"
+    )]
     pub async fn dependency_graph(
         &self,
         previous_graph: Option<&DependencyGraph>,

@@ -39,7 +84,7 @@ impl Project {
         if let Some(previous_graph) = previous_graph {
             for (name, versions) in previous_graph {
                 for (version, node) in versions {
-                    let Some((_, specifier, source_ty)) = &node.direct else {
+                    let Some((old_alias, specifier, source_ty)) = &node.direct else {
                         // this is not a direct dependency, will be added if it's still being used later
                         continue;
                     };

@@ -51,13 +96,16 @@ impl Project {

                     let Some(alias) = all_specifiers.remove(&(specifier.clone(), *source_ty))
                     else {
-                        log::debug!(
-                            "dependency {name}@{version} from old dependency graph is no longer in the manifest",
+                        tracing::debug!(
+                            "dependency {name}@{version} (old alias {old_alias}) from old dependency graph is no longer in the manifest",
                         );
                         continue;
                     };

-                    log::debug!("resolved {}@{} from old dependency graph", name, version);
+                    let span = tracing::info_span!("resolve from old graph", alias);
+                    let _guard = span.enter();
+
+                    tracing::debug!("resolved {}@{} from old dependency graph", name, version);
                     insert_node(
                         &mut graph,
                         name.clone(),

@@ -72,22 +120,24 @@ impl Project {
                     let mut queue = node
                         .dependencies
                         .iter()
-                        .map(|(name, (version, _))| (name, version, 0usize))
+                        .map(|(name, (version, dep_alias))| {
+                            (
+                                name,
+                                version,
+                                vec![alias.to_string(), dep_alias.to_string()],
+                            )
+                        })
                         .collect::<VecDeque<_>>();

-                    while let Some((dep_name, dep_version, depth)) = queue.pop_front() {
+                    while let Some((dep_name, dep_version, path)) = queue.pop_front() {
+                        let inner_span =
+                            tracing::info_span!("resolve dependency", path = path.join(">"));
+                        let _inner_guard = inner_span.enter();
                         if let Some(dep_node) = previous_graph
                             .get(dep_name)
                             .and_then(|v| v.get(dep_version))
                         {
-                            log::debug!(
-                                "{}resolved dependency {}@{} from {}@{}",
-                                "\t".repeat(depth),
-                                dep_name,
-                                dep_version,
-                                name,
-                                version
-                            );
+                            tracing::debug!("resolved sub-dependency {dep_name}@{dep_version}");
                             insert_node(
                                 &mut graph,
                                 dep_name.clone(),

@@ -99,15 +149,20 @@ impl Project {
                             dep_node
                                 .dependencies
                                 .iter()
-                                .map(|(name, (version, _))| (name, version, depth + 1))
+                                .map(|(name, (version, alias))| {
+                                    (
+                                        name,
+                                        version,
+                                        path.iter()
+                                            .cloned()
+                                            .chain(std::iter::once(alias.to_string()))
+                                            .collect(),
+                                    )
+                                })
                                 .for_each(|dep| queue.push_back(dep));
                         } else {
-                            log::warn!(
-                                "dependency {}@{} from {}@{} not found in previous graph",
-                                dep_name,
-                                dep_version,
-                                name,
-                                version
+                            tracing::warn!(
+                                "dependency {dep_name}@{dep_version} not found in previous graph"
                             );
                         }
                     }

@@ -119,7 +174,6 @@ impl Project {
             .into_iter()
             .map(|((spec, ty), alias)| {
                 (
-                    alias.to_string(),
                     spec,
                     ty,
                     None::<(PackageNames, VersionId)>,

@@ -130,219 +184,233 @@ impl Project {
             })
             .collect::<VecDeque<_>>();

-        while let Some((alias, specifier, ty, dependant, path, overridden, target)) =
-            queue.pop_front()
-        {
+        while let Some((specifier, ty, dependant, path, overridden, target)) = queue.pop_front() {
+            async {
+                let alias = path.last().unwrap().clone();
                 let depth = path.len() - 1;

-            log::debug!(
-                "{}resolving {specifier} ({alias}) from {dependant:?}",
-                "\t".repeat(depth)
-            );
-            let source = match &specifier {
-                DependencySpecifiers::Pesde(specifier) => {
-                    let index_url = if !is_published_package && (depth == 0 || overridden) {
-                        let index_name = specifier.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
-
+                tracing::debug!("resolving {specifier} ({ty:?})");
+                let source = match &specifier {
+                    DependencySpecifiers::Pesde(specifier) => {
+                        let index_url = if !is_published_package && (depth == 0 || overridden) {
+                            let index_name = specifier.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
+
                             manifest
                                 .indices
                                 .get(index_name)
                                 .ok_or(errors::DependencyGraphError::IndexNotFound(
                                     index_name.to_string(),
                                 ))?
                                 .clone()
+                        } else {
+                            let index_url = specifier.index.clone().unwrap();
+
+                            index_url
+                                .clone()
+                                .try_into()
+                                // specifiers in indices store the index url in this field
+                                .unwrap()
+                        };
+
+                        PackageSources::Pesde(PesdePackageSource::new(index_url))
+                    }
+                    #[cfg(feature = "wally-compat")]
+                    DependencySpecifiers::Wally(specifier) => {
+                        let index_url = if !is_published_package && (depth == 0 || overridden) {
+                            let index_name = specifier.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
+
+                            manifest
+                                .wally_indices
+                                .get(index_name)
+                                .ok_or(errors::DependencyGraphError::WallyIndexNotFound(
+                                    index_name.to_string(),
+                                ))?
+                                .clone()
+                        } else {
+                            let index_url = specifier.index.clone().unwrap();
+
+                            index_url
+                                .clone()
+                                .try_into()
+                                // specifiers in indices store the index url in this field
+                                .unwrap()
+                        };
+
+                        PackageSources::Wally(crate::source::wally::WallyPackageSource::new(index_url))
+                    }
+                    DependencySpecifiers::Git(specifier) => PackageSources::Git(
+                        crate::source::git::GitPackageSource::new(specifier.repo.clone()),
+                    ),
+                    DependencySpecifiers::Workspace(_) => {
+                        PackageSources::Workspace(crate::source::workspace::WorkspacePackageSource)
+                    }
+                };
+
+                if refreshed_sources.insert(source.clone()) {
+                    source.refresh(self).await.map_err(|e| Box::new(e.into()))?;
+                }
+
+                let (name, resolved) = source
+                    .resolve(&specifier, self, target, refreshed_sources)
+                    .await
+                    .map_err(|e| Box::new(e.into()))?;
+
+                let Some(target_version_id) = graph
+                    .get(&name)
+                    .and_then(|versions| {
+                        versions
+                            .keys()
+                            // only consider versions that are compatible with the specifier
+                            .filter(|ver| resolved.contains_key(ver))
+                            .max()
+                    })
+                    .or_else(|| resolved.last_key_value().map(|(ver, _)| ver))
+                    .cloned()
+                else {
+                    return Err(Box::new(errors::DependencyGraphError::NoMatchingVersion(
+                        format!("{specifier} ({target})"),
+                    )));
+                };
+
+                let resolved_ty = if (is_published_package || depth == 0) && ty == DependencyType::Peer
+                {
+                    DependencyType::Standard
+                } else {
+                    ty
+                };
+
+                if let Some((dependant_name, dependant_version_id)) = dependant {
+                    graph
+                        .get_mut(&dependant_name)
+                        .and_then(|versions| versions.get_mut(&dependant_version_id))
+                        .and_then(|node| {
+                            node.dependencies
+                                .insert(name.clone(), (target_version_id.clone(), alias.clone()))
+                        });
+                }
+
+                let pkg_ref = &resolved[&target_version_id];
+
+                if let Some(already_resolved) = graph
+                    .get_mut(&name)
+                    .and_then(|versions| versions.get_mut(&target_version_id))
+                {
+                    tracing::debug!(
+                        "{}@{} already resolved",
+                        name,
+                        target_version_id
+                    );
+
+                    if std::mem::discriminant(&already_resolved.pkg_ref)
+                        != std::mem::discriminant(pkg_ref)
+                    {
+                        tracing::warn!(
+                            "resolved package {name}@{target_version_id} has a different source than previously resolved one, this may cause issues",
+                        );
+                    }
+
+                    if already_resolved.resolved_ty == DependencyType::Peer {
+                        already_resolved.resolved_ty = resolved_ty;
+                    }
+
+                    if ty == DependencyType::Peer && depth == 0 {
+                        already_resolved.is_peer = true;
+                    }
+
+                    if already_resolved.direct.is_none() && depth == 0 {
+                        already_resolved.direct = Some((alias.clone(), specifier.clone(), ty));
+                    }
+
+                    return Ok(());
+                }
+
+                let node = DependencyGraphNode {
+                    direct: if depth == 0 {
+                        Some((alias.clone(), specifier.clone(), ty))
                     } else {
-                        let index_url = specifier.index.clone().unwrap();
-
-                        index_url
-                            .clone()
-                            .try_into()
-                            // specifiers in indices store the index url in this field
-                            .unwrap()
-                    };
-
-                    PackageSources::Pesde(PesdePackageSource::new(index_url))
-                }
-                #[cfg(feature = "wally-compat")]
-                DependencySpecifiers::Wally(specifier) => {
-                    let index_url = if !is_published_package && (depth == 0 || overridden) {
-                        let index_name = specifier.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
-
-                        manifest
-                            .wally_indices
-                            .get(index_name)
-                            .ok_or(errors::DependencyGraphError::WallyIndexNotFound(
-                                index_name.to_string(),
-                            ))?
-                            .clone()
+                        None
+                    },
+                    pkg_ref: pkg_ref.clone(),
+                    dependencies: Default::default(),
+                    resolved_ty,
+                    is_peer: if depth == 0 {
+                        false
                     } else {
-                        let index_url = specifier.index.clone().unwrap();
-
-                        index_url
-                            .clone()
-                            .try_into()
-                            // specifiers in indices store the index url in this field
-                            .unwrap()
-                    };
-
-                    PackageSources::Wally(crate::source::wally::WallyPackageSource::new(index_url))
-                }
-                DependencySpecifiers::Git(specifier) => PackageSources::Git(
-                    crate::source::git::GitPackageSource::new(specifier.repo.clone()),
-                ),
-                DependencySpecifiers::Workspace(_) => {
-                    PackageSources::Workspace(crate::source::workspace::WorkspacePackageSource)
-                }
-            };
-
-            if refreshed_sources.insert(source.clone()) {
-                source.refresh(self).await.map_err(|e| Box::new(e.into()))?;
-            }
-
-            let (name, resolved) = source
-                .resolve(&specifier, self, target, refreshed_sources)
-                .await
-                .map_err(|e| Box::new(e.into()))?;
-
-            let Some(target_version_id) = graph
-                .get(&name)
-                .and_then(|versions| {
-                    versions
-                        .keys()
-                        // only consider versions that are compatible with the specifier
-                        .filter(|ver| resolved.contains_key(ver))
-                        .max()
-                })
-                .or_else(|| resolved.last_key_value().map(|(ver, _)| ver))
-                .cloned()
-            else {
-                return Err(Box::new(errors::DependencyGraphError::NoMatchingVersion(
-                    format!("{specifier} ({target})"),
-                )));
-            };
-
-            let resolved_ty = if (is_published_package || depth == 0) && ty == DependencyType::Peer
-            {
-                DependencyType::Standard
-            } else {
-                ty
-            };
-
-            if let Some((dependant_name, dependant_version_id)) = dependant {
-                graph
-                    .get_mut(&dependant_name)
-                    .and_then(|versions| versions.get_mut(&dependant_version_id))
-                    .and_then(|node| {
-                        node.dependencies
-                            .insert(name.clone(), (target_version_id.clone(), alias.clone()))
-                    });
-            }
-
-            let pkg_ref = &resolved[&target_version_id];
-
-            if let Some(already_resolved) = graph
-                .get_mut(&name)
-                .and_then(|versions| versions.get_mut(&target_version_id))
-            {
-                log::debug!(
-                    "{}{}@{} already resolved",
-                    "\t".repeat(depth),
+                        ty == DependencyType::Peer
+                    },
+                };
+                insert_node(
+                    &mut graph,
+                    name.clone(),
+                    target_version_id.clone(),
+                    node.clone(),
+                    depth == 0,
+                );
+
+                tracing::debug!(
+                    "resolved {}@{} from new dependency graph",
                     name,
                     target_version_id
                 );

-                if std::mem::discriminant(&already_resolved.pkg_ref)
-                    != std::mem::discriminant(pkg_ref)
+                for (dependency_alias, (dependency_spec, dependency_ty)) in
+                    pkg_ref.dependencies().clone()
                 {
-                    log::warn!(
-                        "resolved package {name}@{target_version_id} has a different source than the previously resolved one, this may cause issues",
-                    );
+                    if dependency_ty == DependencyType::Dev {
+                        // dev dependencies of dependencies are to be ignored
+                        continue;
+                    }
+
+                    let overridden = manifest.overrides.iter().find_map(|(key, spec)| {
+                        key.0.iter().find_map(|override_path| {
+                            // if the path up until the last element is the same as the current path,
+                            // and the last element in the path is the dependency alias,
+                            // then the specifier is to be overridden
+                            (path.len() == override_path.len() - 1
+                                && path == override_path[..override_path.len() - 1]
+                                && override_path.last() == Some(&dependency_alias))
+                            .then_some(spec)
+                        })
+                    });
+
+                    if overridden.is_some() {
+                        tracing::debug!(
+                            "overridden specifier found for {} ({dependency_spec})",
+                            path.iter()
+                                .map(|s| s.as_str())
+                                .chain(std::iter::once(dependency_alias.as_str()))
+                                .collect::<Vec<_>>()
+                                .join(">"),
+                        );
+                    }
+
+                    queue.push_back((
+                        overridden.cloned().unwrap_or(dependency_spec),
+                        dependency_ty,
+                        Some((name.clone(), target_version_id.clone())),
+                        path.iter()
+                            .cloned()
+                            .chain(std::iter::once(dependency_alias))
+                            .collect(),
+                        overridden.is_some(),
+                        *target_version_id.target(),
+                    ));
                 }

-                if already_resolved.resolved_ty == DependencyType::Peer
-                    && resolved_ty == DependencyType::Standard
-                {
-                    already_resolved.resolved_ty = resolved_ty;
-                }
-
-                if already_resolved.direct.is_none() && depth == 0 {
-                    already_resolved.direct = Some((alias.clone(), specifier.clone(), ty));
-                }
-
-                continue;
-            }
-
-            let node = DependencyGraphNode {
-                direct: if depth == 0 {
-                    Some((alias.clone(), specifier.clone(), ty))
-                } else {
-                    None
-                },
-                pkg_ref: pkg_ref.clone(),
-                dependencies: Default::default(),
-                resolved_ty,
-            };
-            insert_node(
-                &mut graph,
-                name.clone(),
-                target_version_id.clone(),
-                node.clone(),
-                depth == 0,
-            );
-
-            log::debug!(
-                "{}resolved {}@{} from new dependency graph",
-                "\t".repeat(depth),
-                name,
-                target_version_id
-            );
-
-            for (dependency_alias, (dependency_spec, dependency_ty)) in
-                pkg_ref.dependencies().clone()
-            {
-                if dependency_ty == DependencyType::Dev {
-                    // dev dependencies of dependencies are to be ignored
-                    continue;
-                }
-
-                let overridden = manifest.overrides.iter().find_map(|(key, spec)| {
-                    key.0.iter().find_map(|override_path| {
-                        // if the path up until the last element is the same as the current path,
-                        // and the last element in the path is the dependency alias,
-                        // then the specifier is to be overridden
-                        (path.len() == override_path.len() - 1
-                            && path == override_path[..override_path.len() - 1]
-                            && override_path.last() == Some(&dependency_alias))
-                        .then_some(spec)
-                    })
-                });
-
-                if overridden.is_some() {
-                    log::debug!(
-                        "{}overridden specifier found for {dependency_alias} ({dependency_spec})",
-                        "\t".repeat(depth)
-                    );
-                }
-
-                queue.push_back((
-                    dependency_alias,
-                    overridden.cloned().unwrap_or(dependency_spec),
-                    dependency_ty,
-                    Some((name.clone(), target_version_id.clone())),
-                    path.iter()
-                        .cloned()
-                        .chain(std::iter::once(alias.to_string()))
-                        .collect(),
-                    overridden.is_some(),
-                    *target_version_id.target(),
-                ));
-            }
+                Ok(())
+            }
+            .instrument(tracing::info_span!("resolve new/changed", path = path.join(">")))
+            .await?;
         }

-        for (name, versions) in &graph {
+        for (name, versions) in &mut graph {
             for (version_id, node) in versions {
+                if node.is_peer && node.direct.is_none() {
+                    node.resolved_ty = DependencyType::Peer;
+                }
+
                 if node.resolved_ty == DependencyType::Peer {
-                    log::warn!("peer dependency {name}@{version_id} was not resolved");
+                    tracing::warn!("peer dependency {name}@{version_id} was not resolved");
                 }
             }
         }
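
Note: insert_node, moved here from the lockfile module and reworked, makes graph insertion idempotent: the first node recorded for a name/version wins, and a later insertion can only contribute a missing direct alias. A reduced sketch of the same entry-API pattern over a plain map (types simplified, not pesde's):

    use std::collections::{btree_map::Entry, BTreeMap};

    // the Option<String> stands in for DependencyGraphNode's `direct` field
    fn insert(graph: &mut BTreeMap<String, Option<String>>, key: String, direct: Option<String>) {
        match graph.entry(key) {
            Entry::Vacant(entry) => {
                entry.insert(direct);
            }
            Entry::Occupied(existing) => {
                let node = existing.into_mut();
                // only fill in a direct alias if none was recorded yet
                if node.is_none() {
                    *node = direct;
                }
            }
        }
    }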

@@ -1,12 +1,15 @@
 use crate::Project;
 use std::{
     ffi::OsStr,
-    fmt::{Display, Formatter},
-    io::{BufRead, BufReader},
+    fmt::{Debug, Display, Formatter},
     path::Path,
-    process::{Command, Stdio},
-    thread::spawn,
+    process::Stdio,
 };
+use tokio::{
+    io::{AsyncBufReadExt, BufReader},
+    process::Command,
+};
+use tracing::instrument;

 /// Script names used by pesde
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]

@@ -28,7 +31,8 @@ impl Display for ScriptName {
     }
 }

-pub(crate) fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(
+#[instrument(skip(project), level = "debug")]
+pub(crate) async fn execute_script<A: IntoIterator<Item = S> + Debug, S: AsRef<OsStr> + Debug>(
     script_name: ScriptName,
     script_path: &Path,
     args: A,

@@ -47,20 +51,20 @@ pub(crate) fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(
         .spawn()
     {
         Ok(mut child) => {
-            let stdout = BufReader::new(child.stdout.take().unwrap());
-            let stderr = BufReader::new(child.stderr.take().unwrap());
+            let mut stdout = BufReader::new(child.stdout.take().unwrap()).lines();
+            let mut stderr = BufReader::new(child.stderr.take().unwrap()).lines();

             let script = script_name.to_string();
             let script_2 = script.to_string();

-            spawn(move || {
-                for line in stderr.lines() {
+            tokio::spawn(async move {
+                while let Some(line) = stderr.next_line().await.transpose() {
                     match line {
                         Ok(line) => {
-                            log::error!("[{script}]: {line}");
+                            tracing::error!("[{script}]: {line}");
                         }
                         Err(e) => {
-                            log::error!("ERROR IN READING STDERR OF {script}: {e}");
+                            tracing::error!("ERROR IN READING STDERR OF {script}: {e}");
                             break;
                         }
                     }

@@ -69,18 +73,18 @@ pub(crate) fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(

             let mut stdout_str = String::new();

-            for line in stdout.lines() {
+            while let Some(line) = stdout.next_line().await.transpose() {
                 match line {
                     Ok(line) => {
                         if return_stdout {
                             stdout_str.push_str(&line);
                             stdout_str.push('\n');
                         } else {
-                            log::info!("[{script_2}]: {line}");
+                            tracing::info!("[{script_2}]: {line}");
                         }
                     }
                     Err(e) => {
-                        log::error!("ERROR IN READING STDOUT OF {script_2}: {e}");
+                        tracing::error!("ERROR IN READING STDOUT OF {script_2}: {e}");
                         break;
                     }
                 }

@@ -93,7 +97,7 @@ pub(crate) fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(
             }
         }
         Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
-            log::warn!("Lune could not be found in PATH: {e}");
+            tracing::warn!("Lune could not be found in PATH: {e}");

             Ok(None)
         }
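
Note: the blocking std BufReader plus OS thread becomes tokio's async reader plus a spawned task. next_line() returns Ok(None) at EOF, and transpose() turns that into None so the while-let ends. A minimal self-contained sketch of the pattern (hypothetical echo command, requires tokio's process, io-util, rt, and macros features):

    use std::process::Stdio;
    use tokio::{
        io::{AsyncBufReadExt, BufReader},
        process::Command,
    };

    #[tokio::main]
    async fn main() -> std::io::Result<()> {
        let mut child = Command::new("echo")
            .arg("hello")
            .stdout(Stdio::piped())
            .spawn()?;
        let mut lines = BufReader::new(child.stdout.take().unwrap()).lines();

        // Ok(None) at EOF -> transpose() -> None, ending the loop
        while let Some(line) = lines.next_line().await.transpose() {
            println!("[child]: {}", line?);
        }
        Ok(())
    }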

@@ -9,6 +9,7 @@ use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};
 use std::{
     collections::BTreeMap,
+    fmt::Debug,
     future::Future,
     path::{Path, PathBuf},
 };

@@ -17,6 +18,7 @@ use tokio::{
     io::{AsyncReadExt, AsyncWriteExt},
     pin,
 };
+use tracing::instrument;

 /// A file system entry
 #[derive(Debug, Clone, Serialize, Deserialize)]

@@ -125,7 +127,8 @@ pub(crate) async fn store_in_cas<

 impl PackageFS {
     /// Write the package to the given destination
-    pub async fn write_to<P: AsRef<Path>, Q: AsRef<Path>>(
+    #[instrument(skip(self), level = "debug")]
+    pub async fn write_to<P: AsRef<Path> + Debug, Q: AsRef<Path> + Debug>(
         &self,
         destination: P,
         cas_path: Q,

@@ -211,7 +214,8 @@ impl PackageFS {
     }

     /// Returns the contents of the file with the given hash
-    pub async fn read_file<P: AsRef<Path>, H: AsRef<str>>(
+    #[instrument(skip(self), ret(level = "trace"), level = "debug")]
+    pub async fn read_file<P: AsRef<Path> + Debug, H: AsRef<str> + Debug>(
         &self,
         file_hash: H,
         cas_path: P,

@@ -27,6 +27,7 @@ use std::{
     sync::Arc,
 };
 use tokio::{sync::Mutex, task::spawn_blocking};
+use tracing::instrument;

 /// The Git package reference
 pub mod pkg_ref;

@@ -70,10 +71,12 @@ impl PackageSource for GitPackageSource {
     type ResolveError = errors::ResolveError;
     type DownloadError = errors::DownloadError;

+    #[instrument(skip_all, level = "debug")]
     async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
         GitBasedSource::refresh(self, project).await
     }

+    #[instrument(skip_all, level = "debug")]
     async fn resolve(
         &self,
         specifier: &Self::Specifier,

@@ -329,6 +332,7 @@ impl PackageSource for GitPackageSource {
         ))
     }

+    #[instrument(skip_all, level = "debug")]
     async fn download(
         &self,
         pkg_ref: &Self::Ref,

@@ -343,7 +347,7 @@ impl PackageSource for GitPackageSource {

         match fs::read_to_string(&index_file).await {
             Ok(s) => {
-                log::debug!(
+                tracing::debug!(
                     "using cached index file for package {}#{}",
                     pkg_ref.repo,
                     pkg_ref.tree_id

@@ -487,7 +491,7 @@ impl PackageSource for GitPackageSource {
         }

         if pkg_ref.use_new_structure() && name == "default.project.json" {
-            log::debug!(
+            tracing::debug!(
                 "removing default.project.json from {}#{} at {path} - using new structure",
                 pkg_ref.repo,
                 pkg_ref.tree_id

@@ -1,8 +1,11 @@
 #![allow(async_fn_in_trait)]

 use crate::{util::authenticate_conn, Project};
 use fs_err::tokio as fs;
 use gix::remote::Direction;
+use std::fmt::Debug;
 use tokio::task::spawn_blocking;
+use tracing::instrument;

 /// A trait for sources that are based on Git repositories
 pub trait GitBasedSource {

@@ -90,7 +93,11 @@ pub trait GitBasedSource {
 }

 /// Reads a file from a tree
-pub fn read_file<I: IntoIterator<Item = P> + Clone, P: ToString + PartialEq<gix::bstr::BStr>>(
+#[instrument(skip(tree), ret, level = "trace")]
+pub fn read_file<
+    I: IntoIterator<Item = P> + Clone + Debug,
+    P: ToString + PartialEq<gix::bstr::BStr>,
+>(
     tree: &gix::Tree,
     file_path: I,
 ) -> Result<Option<String>, errors::ReadFile> {

@@ -120,6 +127,7 @@ pub fn read_file<I: IntoIterator<Item = P> + Clone, P: ToString + PartialEq<gix:
 }

 /// Gets the root tree of a repository
+#[instrument(skip(repo), level = "trace")]
 pub fn root_tree(repo: &gix::Repository) -> Result<gix::Tree, errors::TreeError> {
     // this is a bare repo, so this is the actual path
     let path = repo.path().to_path_buf();
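
Note: #[instrument] wraps each call of the annotated function in a tracing span, records the arguments as fields unless they are skipped (hence the new Debug bounds), and with ret also records the return value. A tiny illustration on an unrelated function (tracing crate semantics, not pesde code):

    use tracing::instrument;

    // creates a span named `checksum` with `path` captured as a field;
    // `ret` logs the return value at trace level
    #[instrument(ret, level = "trace")]
    fn checksum(path: &str) -> u32 {
        path.bytes().map(u32::from).sum()
    }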

@@ -17,7 +17,7 @@ use crate::{
         target::{Target, TargetKind},
         DependencyType,
     },
-    names::PackageNames,
+    names::{PackageName, PackageNames},
     source::{
         fs::{store_in_cas, FSEntry, PackageFS},
         git_index::{read_file, root_tree, GitBasedSource},

@@ -30,6 +30,7 @@ use crate::{
 use fs_err::tokio as fs;
 use futures::StreamExt;
 use tokio::task::spawn_blocking;
+use tracing::instrument;

 /// The pesde package reference
 pub mod pkg_ref;

@@ -73,6 +74,7 @@ impl PesdePackageSource {
     }

     /// Reads the config file
+    #[instrument(skip_all, ret(level = "trace"), level = "debug")]
     pub async fn config(&self, project: &Project) -> Result<IndexConfig, errors::ConfigError> {
         let repo_url = self.repo_url.clone();
         let path = self.path(project);

@@ -90,14 +92,6 @@ impl PesdePackageSource {
         .await
         .unwrap()
     }
-
-    /// The git2 repository for the index
-    #[cfg(feature = "git2")]
-    pub fn repo_git2(&self, project: &Project) -> Result<git2::Repository, git2::Error> {
-        let path = self.path(project);
-
-        git2::Repository::open_bare(&path)
-    }
 }

 impl PackageSource for PesdePackageSource {

@@ -107,10 +101,12 @@ impl PackageSource for PesdePackageSource {
     type ResolveError = errors::ResolveError;
     type DownloadError = errors::DownloadError;

+    #[instrument(skip_all, level = "debug")]
     async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
         GitBasedSource::refresh(self, project).await
     }

+    #[instrument(skip_all, level = "debug")]
     async fn resolve(
         &self,
         specifier: &Self::Specifier,

@@ -132,10 +128,10 @@ impl PackageSource for PesdePackageSource {
             }
         };

-        let entries: IndexFile = toml::from_str(&string)
+        let IndexFile { entries, .. } = toml::from_str(&string)
             .map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;

-        log::debug!("{} has {} possible entries", specifier.name, entries.len());
+        tracing::debug!("{} has {} possible entries", specifier.name, entries.len());

         Ok((
             PackageNames::Pesde(specifier.name.clone()),

@@ -163,6 +159,7 @@ impl PackageSource for PesdePackageSource {
         ))
     }

+    #[instrument(skip_all, level = "debug")]
     async fn download(
         &self,
         pkg_ref: &Self::Ref,

@@ -179,7 +176,7 @@ impl PackageSource for PesdePackageSource {

         match fs::read_to_string(&index_file).await {
             Ok(s) => {
-                log::debug!(
+                tracing::debug!(
                     "using cached index file for package {}@{} {}",
                     pkg_ref.name,
                     pkg_ref.version,

@@ -200,7 +197,7 @@ impl PackageSource for PesdePackageSource {
         let mut request = reqwest.get(&url).header(ACCEPT, "application/octet-stream");

         if let Some(token) = project.auth_config.tokens().get(&self.repo_url) {
-            log::debug!("using token for {}", self.repo_url);
+            tracing::debug!("using token for {}", self.repo_url);
             request = request.header(AUTHORIZATION, token);
         }

@@ -282,28 +279,35 @@ impl Default for AllowedRegistries {
     }
 }

-impl AllowedRegistries {
-    /// Whether the given URL is allowed
-    pub fn is_allowed(&self, mut this: Url, mut external: Url) -> bool {
-        // strip .git suffix to allow for more flexible matching
-        this.path = this.path.strip_suffix(b".git").unwrap_or(&this.path).into();
-        external.path = external
-            .path
-            .strip_suffix(b".git")
-            .unwrap_or(&external.path)
-            .into();
-
-        this == external
-            || (match self {
-                Self::All(all) => *all,
-                Self::Specific(urls) => urls.contains(&this) || urls.contains(&external),
-            })
+// strips .git suffix to allow for more flexible matching
+fn simplify_url(mut url: Url) -> Url {
+    url.path = url.path.strip_suffix(b".git").unwrap_or(&url.path).into();
+    url
+}
+
+impl AllowedRegistries {
+    fn _is_allowed(&self, url: &Url) -> bool {
+        match self {
+            Self::All(all) => *all,
+            Self::Specific(urls) => urls.contains(url),
+        }
+    }
+
+    /// Whether the given URL is allowed
+    pub fn is_allowed(&self, url: Url) -> bool {
+        self._is_allowed(&simplify_url(url))
+    }
+
+    /// Whether the given URL is allowed, or is the same as the given URL
+    pub fn is_allowed_or_same(&self, this: Url, external: Url) -> bool {
+        let this = simplify_url(this);
+        let external = simplify_url(external);
+        (this == external) || self._is_allowed(&external) || self._is_allowed(&this)
     }
 }

 /// The configuration for the pesde index
 #[derive(Deserialize, Debug, Clone)]
-#[serde(deny_unknown_fields)]
 pub struct IndexConfig {
     /// The URL of the API
     pub api: url::Url,
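
Note: registry URLs are now normalized once through simplify_url, so `.../index` and `.../index.git` compare equal, and is_allowed_or_same accepts a URL when it matches the other side or either form is on the allowlist. A rough restatement over plain strings (pesde uses gix URLs; this is a simplification):

    fn simplify(url: &str) -> &str {
        url.strip_suffix(".git").unwrap_or(url)
    }

    fn is_allowed_or_same(this: &str, external: &str, allowed: &[&str]) -> bool {
        let (this, external) = (simplify(this), simplify(external));
        this == external || allowed.contains(&external) || allowed.contains(&this)
    }

    fn main() {
        assert!(is_allowed_or_same(
            "https://github.com/pesde-pkg/index",
            "https://github.com/pesde-pkg/index.git",
            &[],
        ));
    }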

@@ -311,19 +315,22 @@ pub struct IndexConfig {
     pub download: Option<String>,
     /// Whether Git is allowed as a source for publishing packages
     #[serde(default)]
-    pub git_allowed: bool,
+    pub git_allowed: AllowedRegistries,
     /// Whether other registries are allowed as a source for publishing packages
     #[serde(default)]
     pub other_registries_allowed: AllowedRegistries,
     /// Whether Wally is allowed as a source for publishing packages
     #[serde(default)]
-    pub wally_allowed: bool,
+    pub wally_allowed: AllowedRegistries,
     /// The OAuth client ID for GitHub
     #[serde(default)]
     pub github_oauth_client_id: Option<String>,
     /// The maximum size of an archive in bytes
     #[serde(default = "default_archive_size")]
     pub max_archive_size: usize,
+    /// The packages to display in the CLI for default script implementations
+    #[serde(default)]
+    pub scripts_packages: Vec<PackageName>,
 }

 impl IndexConfig {

@@ -425,8 +432,20 @@ pub struct IndexFileEntry {
     pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
 }

+/// The package metadata in the index file
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
+pub struct IndexMetadata {}
+
 /// The index file for a package
-pub type IndexFile = BTreeMap<VersionId, IndexFileEntry>;
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
+pub struct IndexFile {
+    /// Any package-wide metadata
+    #[serde(default, skip_serializing_if = "crate::util::is_default")]
+    pub meta: IndexMetadata,
+    /// The entries in the index file
+    #[serde(flatten)]
+    pub entries: BTreeMap<VersionId, IndexFileEntry>,
+}

 /// Errors that can occur when interacting with the pesde package source
 pub mod errors {
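
Note: because entries is flattened, each version id stays a top-level key of the package's TOML document, so index files written before this change should still parse; meta is skipped when it is the default. A minimal serde sketch of the same shape (field types simplified, version key hypothetical):

    use serde::Deserialize;
    use std::collections::BTreeMap;

    #[derive(Deserialize, Debug)]
    struct File {
        #[serde(default)]
        meta: Option<String>, // simplified stand-in for IndexMetadata
        #[serde(flatten)]
        entries: BTreeMap<String, toml::Value>, // version id -> entry
    }

    fn main() {
        // the "0.1.0 luau" table lands in `entries`; `meta` stays None
        let doc = "[\"0.1.0 luau\"]\ndescription = \"example\"\n";
        let file: File = toml::from_str(doc).unwrap();
        assert_eq!(file.entries.len(), 1);
    }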

@@ -11,6 +11,7 @@ use crate::{
     Project, LINK_LIB_NO_FILE_FOUND,
 };
 use fs_err::tokio as fs;
+use tracing::instrument;

 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]

@@ -19,7 +20,8 @@ struct SourcemapNode {
     file_paths: Vec<RelativePathBuf>,
 }

-pub(crate) async fn find_lib_path(
+#[instrument(skip(project, package_dir), level = "debug")]
+async fn find_lib_path(
     project: &Project,
     package_dir: &Path,
 ) -> Result<Option<RelativePathBuf>, errors::FindLibPathError> {

@@ -29,7 +31,7 @@ pub(crate) async fn find_lib_path(
         .scripts
         .get(&ScriptName::SourcemapGenerator.to_string())
     else {
-        log::warn!("no sourcemap generator script found in manifest");
+        tracing::warn!("no sourcemap generator script found in manifest");
         return Ok(None);
     };

@@ -39,7 +41,8 @@ pub(crate) async fn find_lib_path(
         [package_dir],
         project,
         true,
-    )?;
+    )
+    .await?;

     if let Some(result) = result.filter(|result| !result.is_empty()) {
         let node: SourcemapNode = serde_json::from_str(&result)?;

@@ -54,6 +57,7 @@ pub(crate) async fn find_lib_path(

 pub(crate) const WALLY_MANIFEST_FILE_NAME: &str = "wally.toml";

+#[instrument(skip(project, tempdir), level = "debug")]
 pub(crate) async fn get_target(
     project: &Project,
     tempdir: &TempDir,

@@ -1,13 +1,13 @@
 use std::collections::BTreeMap;

-use semver::{Version, VersionReq};
-use serde::{Deserialize, Deserializer};
-
 use crate::{
     manifest::{errors, DependencyType},
     names::wally::WallyPackageName,
     source::{specifiers::DependencySpecifiers, wally::specifier::WallyDependencySpecifier},
 };
+use semver::{Version, VersionReq};
+use serde::{Deserialize, Deserializer};
+use tracing::instrument;

 #[derive(Deserialize, Clone, Debug)]
 #[serde(rename_all = "lowercase")]

@@ -63,6 +63,7 @@ pub struct WallyManifest {

 impl WallyManifest {
     /// Get all dependencies from the manifest
+    #[instrument(skip(self), ret(level = "trace"), level = "debug")]
     pub fn all_dependencies(
         &self,
     ) -> Result<
@ -30,6 +30,7 @@ use std::{
|
||||||
use tempfile::tempdir;
|
use tempfile::tempdir;
|
||||||
use tokio::{io::AsyncWriteExt, sync::Mutex, task::spawn_blocking};
|
use tokio::{io::AsyncWriteExt, sync::Mutex, task::spawn_blocking};
|
||||||
use tokio_util::compat::FuturesAsyncReadCompatExt;
|
use tokio_util::compat::FuturesAsyncReadCompatExt;
|
||||||
|
use tracing::instrument;
|
||||||
|
|
||||||
pub(crate) mod compat_util;
|
pub(crate) mod compat_util;
|
||||||
pub(crate) mod manifest;
|
pub(crate) mod manifest;
|
||||||
|
@ -68,6 +69,7 @@ impl WallyPackageSource {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Reads the config file
|
/// Reads the config file
|
||||||
|
#[instrument(skip_all, ret(level = "trace"), level = "debug")]
|
||||||
pub async fn config(&self, project: &Project) -> Result<WallyIndexConfig, errors::ConfigError> {
|
pub async fn config(&self, project: &Project) -> Result<WallyIndexConfig, errors::ConfigError> {
|
||||||
let repo_url = self.repo_url.clone();
|
let repo_url = self.repo_url.clone();
|
||||||
let path = self.path(project);
|
let path = self.path(project);
|
||||||
|
@ -94,10 +96,12 @@ impl PackageSource for WallyPackageSource {
|
||||||
type ResolveError = errors::ResolveError;
|
type ResolveError = errors::ResolveError;
|
||||||
type DownloadError = errors::DownloadError;
|
type DownloadError = errors::DownloadError;
|
||||||
|
|
||||||
|
#[instrument(skip_all, level = "debug")]
|
||||||
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
|
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
|
||||||
GitBasedSource::refresh(self, project).await
|
GitBasedSource::refresh(self, project).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[instrument(skip_all, level = "debug")]
|
||||||
async fn resolve(
|
async fn resolve(
|
||||||
&self,
|
&self,
|
||||||
specifier: &Self::Specifier,
|
specifier: &Self::Specifier,
|
||||||
|
@ -111,7 +115,7 @@ impl PackageSource for WallyPackageSource {
|
||||||
let string = match read_file(&tree, [scope, name]) {
|
let string = match read_file(&tree, [scope, name]) {
|
||||||
Ok(Some(s)) => s,
|
Ok(Some(s)) => s,
|
||||||
Ok(None) => {
|
Ok(None) => {
|
||||||
log::debug!(
|
tracing::debug!(
|
||||||
"{} not found in wally registry. searching in backup registries",
|
"{} not found in wally registry. searching in backup registries",
|
||||||
specifier.name
|
specifier.name
|
||||||
);
|
);
|
||||||
|
@ -134,7 +138,7 @@ impl PackageSource for WallyPackageSource {
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
Ok((name, results)) => {
|
Ok((name, results)) => {
|
||||||
log::debug!("found {} in backup registry {registry}", name);
|
tracing::debug!("found {} in backup registry {registry}", name);
|
||||||
return Ok((name, results));
|
return Ok((name, results));
|
||||||
}
|
}
|
||||||
Err(errors::ResolveError::NotFound(_)) => {
|
Err(errors::ResolveError::NotFound(_)) => {
|
||||||
|
@@ -162,7 +166,7 @@ impl PackageSource for WallyPackageSource {
             .collect::<Result<_, _>>()
             .map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;

-        log::debug!("{} has {} possible entries", specifier.name, entries.len());
+        tracing::debug!("{} has {} possible entries", specifier.name, entries.len());

         Ok((
             PackageNames::Wally(specifier.name.clone()),
@@ -192,6 +196,7 @@ impl PackageSource for WallyPackageSource {
         ))
     }

+    #[instrument(skip_all, level = "debug")]
     async fn download(
         &self,
         pkg_ref: &Self::Ref,
@@ -207,7 +212,7 @@ impl PackageSource for WallyPackageSource {

         let tempdir = match fs::read_to_string(&index_file).await {
             Ok(s) => {
-                log::debug!(
+                tracing::debug!(
                     "using cached index file for package {}@{}",
                     pkg_ref.name,
                     pkg_ref.version
@@ -240,7 +245,7 @@ impl PackageSource for WallyPackageSource {
         );

         if let Some(token) = project.auth_config.tokens().get(&self.repo_url) {
-            log::debug!("using token for {}", self.repo_url);
+            tracing::debug!("using token for {}", self.repo_url);
            request = request.header(AUTHORIZATION, token);
         }

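The hunks above (and several below) migrate the Wally source's logging from log::debug! to tracing::debug! and put #[instrument] spans on the entry points. A minimal sketch of how the two interact, assuming the tokio, tracing, and tracing-subscriber crates; the function name and URL are illustrative, not pesde's actual code:

use tracing::instrument;

// `skip_all` keeps the arguments out of the span's recorded fields;
// `ret(level = "trace")` logs the return value at TRACE.
#[instrument(skip_all, ret(level = "trace"), level = "debug")]
async fn refresh_index(repo_url: &str) -> usize {
    // Unlike `log::debug!`, this event is attached to the enclosing
    // `refresh_index` span, so subscribers can correlate it with the call.
    tracing::debug!("refreshing index at {repo_url}");
    42
}

#[tokio::main]
async fn main() {
    // Spans and events are dropped unless a subscriber is installed.
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::TRACE)
        .init();
    refresh_index("https://example.com/index").await;
}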
@@ -13,6 +13,7 @@ use relative_path::RelativePathBuf;
 use reqwest::Client;
 use std::collections::{BTreeMap, HashSet};
 use tokio::pin;
+use tracing::instrument;

 /// The workspace package reference
 pub mod pkg_ref;
|
@ -35,6 +36,7 @@ impl PackageSource for WorkspacePackageSource {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[instrument(skip_all, level = "debug")]
|
||||||
async fn resolve(
|
async fn resolve(
|
||||||
&self,
|
&self,
|
||||||
specifier: &Self::Specifier,
|
specifier: &Self::Specifier,
|
||||||
|
@ -49,7 +51,7 @@ impl PackageSource for WorkspacePackageSource {
|
||||||
.unwrap_or(&project.package_dir);
|
.unwrap_or(&project.package_dir);
|
||||||
let target = specifier.target.unwrap_or(project_target);
|
let target = specifier.target.unwrap_or(project_target);
|
||||||
|
|
||||||
let members = project.workspace_members(workspace_dir).await?;
|
let members = project.workspace_members(workspace_dir, true).await?;
|
||||||
pin!(members);
|
pin!(members);
|
||||||
|
|
||||||
while let Some((path, manifest)) = members.next().await.transpose()? {
|
while let Some((path, manifest)) = members.next().await.transpose()? {
|
||||||
|
@@ -126,6 +128,7 @@ impl PackageSource for WorkspacePackageSource {
         ))
     }

+    #[instrument(skip_all, level = "debug")]
     async fn download(
         &self,
         pkg_ref: &Self::Ref,
@@ -19,7 +19,7 @@ impl DependencySpecifier for WorkspaceDependencySpecifier {}

 impl Display for WorkspaceDependencySpecifier {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "workspace:{}{}", self.version, self.name)
+        write!(f, "{}@workspace:{}", self.name, self.version)
     }
 }
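The Display change above puts the package name first and separates it from the version requirement with @, where the old "workspace:{version}{name}" ran the two together with no delimiter. A hypothetical mirror of the new format (plain Strings stand in for pesde's typed name and version fields):

use std::fmt::{self, Display};

struct WorkspaceDependencySpecifier {
    name: String,
    version: String,
}

impl Display for WorkspaceDependencySpecifier {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}@workspace:{}", self.name, self.version)
    }
}

fn main() {
    let spec = WorkspaceDependencySpecifier {
        name: "acme/lib".into(),
        version: "^1.2.0".into(),
    };
    // The old order would have printed "workspace:^1.2.0acme/lib".
    assert_eq!(spec.to_string(), "acme/lib@workspace:^1.2.0");
}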
@@ -108,6 +108,7 @@ pub mod errors {

     /// Errors that can occur when parsing a version type
     #[derive(Debug, Error)]
+    #[non_exhaustive]
     pub enum VersionTypeFromStr {
         /// The version type is invalid
         #[error("invalid version type {0}")]
@@ -116,6 +117,7 @@ pub mod errors {

     /// Errors that can occur when parsing a version type or requirement
     #[derive(Debug, Error)]
+    #[non_exhaustive]
     pub enum VersionTypeOrReqFromStr {
         /// The version requirement is invalid
         #[error("invalid version requirement {0}")]
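Adding #[non_exhaustive] to these error enums means code outside the defining crate must keep a wildcard arm when matching them, so new variants can be introduced later without a semver-breaking change. A sketch of the downstream effect (simplified: plain Debug instead of thiserror):

// In the defining crate:
#[derive(Debug)]
#[non_exhaustive]
pub enum VersionTypeFromStr {
    InvalidVersionType(String),
}

// In a downstream crate, the `_` arm below is mandatory; the allow is only
// needed because this sketch keeps everything in one crate.
#[allow(unreachable_patterns)]
fn describe(err: &VersionTypeFromStr) -> String {
    match err {
        VersionTypeFromStr::InvalidVersionType(s) => format!("invalid version type {s}"),
        _ => "unrecognized error variant".to_string(),
    }
}

fn main() {
    let err = VersionTypeFromStr::InvalidVersionType("rc".to_string());
    println!("{}", describe(&err));
}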
@@ -83,3 +83,7 @@ pub fn deserialize_git_like_url<'de, D: Deserializer<'de>>(
 pub fn hash<S: AsRef<[u8]>>(struc: S) -> String {
     format!("{:x}", Sha256::digest(struc.as_ref()))
 }
+
+pub fn is_default<T: Default + Eq>(t: &T) -> bool {
+    t == &T::default()
+}
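The new is_default helper reports whether a value still equals its type's Default. The diff doesn't show a call site; one common use for a helper with exactly this shape (an assumption here, not confirmed by the diff) is serde's skip_serializing_if, to omit default-valued fields. A sketch assuming the serde and serde_json crates:

use serde::Serialize;

pub fn is_default<T: Default + Eq>(t: &T) -> bool {
    t == &T::default()
}

#[derive(Serialize, Default)]
struct Manifest {
    name: String,
    // Skipped whenever it still holds bool::default(), i.e. false.
    #[serde(skip_serializing_if = "is_default")]
    private: bool,
}

fn main() {
    let manifest = Manifest {
        name: "acme/lib".into(),
        private: false,
    };
    // Prints {"name":"acme/lib"}; `private` is omitted.
    println!("{}", serde_json::to_string(&manifest).unwrap());
}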
Binary file not shown.
@@ -12,7 +12,7 @@
   "format": "prettier --write ."
  },
  "devDependencies": {
-  "@sveltejs/adapter-vercel": "^5.4.6",
+  "@sveltejs/adapter-cloudflare": "^4.8.0",
   "@sveltejs/kit": "^2.7.3",
   "@sveltejs/vite-plugin-svelte": "^4.0.0",
   "@tailwindcss/typography": "^0.5.15",
@@ -24,6 +24,9 @@
   "eslint-config-prettier": "^9.1.0",
   "eslint-plugin-svelte": "^2.46.0",
   "globals": "^15.11.0",
+  "mdast": "^3.0.0",
+  "mdast-util-directive": "^3.0.0",
+  "mdast-util-to-hast": "^13.2.0",
   "mdsvex": "^0.12.3",
   "prettier": "^3.3.3",
   "prettier-plugin-svelte": "^3.2.7",
@@ -33,7 +36,8 @@
   "tailwindcss": "^3.4.14",
   "typescript": "^5.6.3",
   "typescript-eslint": "^8.12.2",
-  "vite": "^5.4.10"
+  "vite": "^5.4.10",
+  "wrangler": "^3.91.0"
  },
  "type": "module",
  "dependencies": {
@@ -47,15 +51,19 @@
   "hast-util-heading": "^3.0.0",
   "hast-util-heading-rank": "^3.0.0",
   "hast-util-to-text": "^4.0.2",
+  "hastscript": "^9.0.0",
+  "lucide-static": "^0.462.0",
   "lucide-svelte": "^0.446.0",
   "rehype-infer-description-meta": "^2.0.0",
   "rehype-raw": "^7.0.0",
   "rehype-sanitize": "^6.0.0",
   "rehype-slug": "^6.0.0",
   "rehype-stringify": "^10.0.1",
+  "remark-directive": "^3.0.0",
   "remark-frontmatter": "^5.0.0",
   "remark-gemoji": "^8.0.0",
   "remark-gfm": "^4.0.0",
+  "remark-github-admonitions-to-directives": "^2.1.0",
   "remark-parse": "^11.0.0",
   "remark-rehype": "^11.1.1",
   "shiki": "^1.22.2",
108
website/src/admonitions.css
Normal file
@@ -0,0 +1,108 @@
.admonition {
	@apply my-4 rounded-sm px-4 py-3 text-[--tw-prose-body] prose-p:my-2 prose-pre:my-4;
	@apply border-l-4 border-[--admonition-border];
	@apply bg-[--admonition-bg];

	@apply [--shiki-background:theme(colors.white/0.2)];
	@apply dark:[--shiki-background:theme(colors.black/0.2)];

	--tw-prose-body: theme(colors.light);
	--tw-prose-headings: theme(colors.light);
	--tw-prose-lead: theme(colors.light);
	--tw-prose-links: var(--admonition-text);
	--tw-prose-bold: theme(colors.light);
	--tw-prose-counters: theme(colors.light);
	--tw-prose-bullets: var(--admonition-border);
	--tw-prose-hr: var(--admonition-border);
	--tw-prose-quotes: theme(colors.light);
	--tw-prose-quote-borders: var(--admonition-border);
	--tw-prose-code: theme(colors.light);
	--tw-prose-pre-code: theme(colors.light);
	--tw-prose-pre-bg: var(--shiki-background);
	--tw-prose-th-borders: var(--admonition-border);
	--tw-prose-td-borders: var(--admonition-border);
}

.admonition pre {
	@apply border border-[--admonition-border] bg-[--shiki-background];
}

.admonition-title {
	@apply flex items-center space-x-2 text-lg font-semibold;
}

.admonition-title * {
	color: var(--admonition-text);
}

.admonition-icon {
	@apply inline-block size-6 bg-current;
	mask-image: var(--admonition-icon);
}

.admonition-note {
	--admonition-bg: theme(colors.blue.600 / 0.1);
	--admonition-border: theme(colors.blue.600 / 0.4);
	--admonition-text: theme(colors.blue.950);
	--admonition-icon: url(lucide-static/icons/info.svg);
}

.admonition-tip {
	--admonition-bg: theme(colors.green.600 / 0.1);
	--admonition-border: theme(colors.green.600 / 0.4);
	--admonition-text: theme(colors.green.950);
	--admonition-icon: url(lucide-static/icons/lightbulb.svg);
}

.admonition-info {
	--admonition-bg: theme(colors.purple.600 / 0.1);
	--admonition-border: theme(colors.purple.600 / 0.4);
	--admonition-text: theme(colors.purple.950);
	--admonition-icon: url(lucide-static/icons/message-square-warning.svg);
}

.admonition-warning {
	--admonition-bg: theme(colors.yellow.600 / 0.1);
	--admonition-border: theme(colors.yellow.600 / 0.4);
	--admonition-text: theme(colors.yellow.950);
	--admonition-icon: url(lucide-static/icons/triangle-alert.svg);
}

.admonition-danger {
	--admonition-bg: theme(colors.red.600 / 0.1);
	--admonition-border: theme(colors.red.600 / 0.4);
	--admonition-text: theme(colors.red.950);
	--admonition-icon: url(lucide-static/icons/octagon-alert.svg);
}

@media (prefers-color-scheme: dark) {
	.admonition-note {
		--admonition-bg: theme(colors.blue.500 / 0.1);
		--admonition-border: theme(colors.blue.500 / 0.6);
		--admonition-text: theme(colors.blue.100);
	}

	.admonition-tip {
		--admonition-bg: theme(colors.green.500 / 0.1);
		--admonition-border: theme(colors.green.500 / 0.6);
		--admonition-text: theme(colors.green.100);
	}

	.admonition-info {
		--admonition-bg: theme(colors.purple.500 / 0.1);
		--admonition-border: theme(colors.purple.500 / 0.6);
		--admonition-text: theme(colors.purple.100);
	}

	.admonition-warning {
		--admonition-bg: theme(colors.yellow.500 / 0.1);
		--admonition-border: theme(colors.yellow.500 / 0.6);
		--admonition-text: theme(colors.yellow.100);
	}

	.admonition-danger {
		--admonition-bg: theme(colors.red.500 / 0.1);
		--admonition-border: theme(colors.red.500 / 0.6);
		--admonition-text: theme(colors.red.100);
	}
}
@@ -2,6 +2,8 @@
 @import "tailwindcss/components";
 @import "tailwindcss/utilities";

+@import "admonitions.css";
+
 :root {
 	--color-background: 255 245 230;
 	--color-card: 245 230 210;
2
website/src/app.d.ts
vendored
@@ -1,3 +1,5 @@
+/// <reference types="mdast-util-directive" />
+
 // See https://kit.svelte.dev/docs/types#app
 // for information about these interfaces
 declare global {
@@ -3,25 +3,116 @@ import type { Nodes } from "hast"
 import { heading } from "hast-util-heading"
 import { headingRank } from "hast-util-heading-rank"
 import { toText } from "hast-util-to-text"
+import { h, type Child } from "hastscript"
+import type { ContainerDirective } from "mdast-util-directive"
+import type { Handler } from "mdast-util-to-hast"
 import rehypeInferDescriptionMeta from "rehype-infer-description-meta"
 import rehypeRaw from "rehype-raw"
-import rehypeSanitize from "rehype-sanitize"
+import rehypeSanitize, { defaultSchema } from "rehype-sanitize"
 import rehypeSlug from "rehype-slug"
 import rehypeStringify from "rehype-stringify"
+import remarkDirective from "remark-directive"
 import remarkFrontmatter from "remark-frontmatter"
 import remarkGemoji from "remark-gemoji"
 import remarkGfm from "remark-gfm"
+import remarkGithubAdmonitionsToDirectives from "remark-github-admonitions-to-directives"
 import remarkParse from "remark-parse"
 import remarkRehype from "remark-rehype"
-import { createCssVariablesTheme, createHighlighter } from "shiki"
+import { createCssVariablesTheme, createHighlighter, loadWasm } from "shiki"
 import { unified } from "unified"
 import type { Node } from "unist"
 import { map } from "unist-util-map"

-const highlighter = createHighlighter({
-	themes: [],
-	langs: [],
-})
+// @ts-expect-error - typescript doesn't like the wasm import
+import onigWasm from "shiki/onig.wasm"
+
+const loadOnigWasm = (async () => {
+	await loadWasm(onigWasm())
+})()
+
+const highlighter = (async () => {
+	await loadOnigWasm
+
+	return await createHighlighter({
+		themes: [],
+		langs: [],
+	})
+})()
+
+const ADMONITION_TYPES = {
+	note: {
+		label: "Note",
+	},
+	tip: {
+		label: "Tip",
+	},
+	info: {
+		label: "Info",
+	},
+	warning: {
+		label: "Warning",
+	},
+	danger: {
+		label: "Danger",
+	},
+}
+
+const containerDirectiveHandler: Handler = (state, node: ContainerDirective) => {
+	const type = node.name as keyof typeof ADMONITION_TYPES
+	if (!type || !(type in ADMONITION_TYPES)) {
+		return
+	}
+
+	const typeInfo = ADMONITION_TYPES[type]
+
+	let label: Child = typeInfo.label
+
+	const firstChild = node.children[0]
+	if (firstChild?.type === "paragraph" && firstChild.data?.directiveLabel) {
+		node.children.shift()
+		label = state.all(firstChild)
+	}
+
+	return h(
+		"div",
+		{
+			class: `admonition admonition-${type}`,
+		},
+		[
+			h(
+				"p",
+				{
+					class: "admonition-title",
+				},
+				[
+					h("span", {
+						class: "admonition-icon",
+					}),
+					h(
+						"span",
+						{
+							class: "admonition-label",
+						},
+						label,
+					),
+				],
+			),
+			state.all(node),
+		],
+	)
+}
+
+const sanitizeSchema: typeof defaultSchema = {
+	...defaultSchema,
+	attributes: {
+		...defaultSchema.attributes,
+		"*": [...(defaultSchema.attributes?.["*"] ?? []), ["className", "admonition", /^admonition-/]],
+	},
+}
+
+const remarkRehypeHandlers = {
+	containerDirective: containerDirectiveHandler,
+}

 export const markdown = (async () => {
 	return unified()
@@ -29,9 +120,11 @@ export const markdown = (async () => {
 		.use(remarkFrontmatter)
 		.use(remarkGfm)
 		.use(remarkGemoji)
-		.use(remarkRehype, { allowDangerousHtml: true })
+		.use(remarkGithubAdmonitionsToDirectives)
+		.use(remarkDirective)
+		.use(remarkRehype, { allowDangerousHtml: true, handlers: remarkRehypeHandlers })
 		.use(rehypeRaw)
-		.use(rehypeSanitize)
+		.use(rehypeSanitize, sanitizeSchema)
 		.use(rehypeShikiFromHighlighter, await highlighter, {
 			lazy: true,
 			theme: createCssVariablesTheme({
@@ -58,7 +151,13 @@ export const docsMarkdown = (async () => {
 		.use(remarkFrontmatter)
 		.use(remarkGfm)
 		.use(remarkGemoji)
-		.use(remarkRehype, { allowDangerousHtml: true, clobberPrefix: "" })
+		.use(remarkGithubAdmonitionsToDirectives)
+		.use(remarkDirective)
+		.use(remarkRehype, {
+			allowDangerousHtml: true,
+			clobberPrefix: "",
+			handlers: remarkRehypeHandlers,
+		})
 		.use(rehypeSlug)
 		.use(() => (node, file) => {
 			const toc: TocItem[] = []
@@ -97,7 +196,7 @@ export const docsMarkdown = (async () => {
 			}) as Node
 		})
 		.use(rehypeRaw)
-		.use(rehypeSanitize)
+		.use(rehypeSanitize, sanitizeSchema)
 		.use(rehypeShikiFromHighlighter, await highlighter, {
 			lazy: true,
 			theme: createCssVariablesTheme({
|
@ -26,6 +26,7 @@ export type TargetInfo = {
|
||||||
kind: TargetKind
|
kind: TargetKind
|
||||||
lib: boolean
|
lib: boolean
|
||||||
bin: boolean
|
bin: boolean
|
||||||
|
scripts?: string[]
|
||||||
}
|
}
|
||||||
|
|
||||||
export type TargetKind = "roblox" | "roblox_server" | "lune" | "luau"
|
export type TargetKind = "roblox" | "roblox_server" | "lune" | "luau"
|
||||||
|
|
|
@@ -2,7 +2,7 @@
 	import { page } from "$app/stores"
 	import GitHub from "$lib/components/GitHub.svelte"
 	import type { TargetInfo } from "$lib/registry-api"
-	import { BinaryIcon, Globe, Icon, LibraryIcon, Mail } from "lucide-svelte"
+	import { BinaryIcon, Globe, Icon, LibraryIcon, Mail, ScrollIcon } from "lucide-svelte"
 	import type { ComponentType } from "svelte"
 	import TargetSelector from "../../TargetSelector.svelte"
 	import Command from "./Command.svelte"
@@ -36,11 +36,13 @@
 	const exportNames: Partial<Record<keyof TargetInfo, string>> = {
 		lib: "Library",
 		bin: "Binary",
+		scripts: "Scripts",
 	}

 	const exportIcons: Partial<Record<keyof TargetInfo, ComponentType<Icon>>> = {
 		lib: LibraryIcon,
 		bin: BinaryIcon,
+		scripts: ScrollIcon,
 	}

 	const exportEntries = $derived(
|
||||||
<ul class="mb-6 space-y-0.5">
|
<ul class="mb-6 space-y-0.5">
|
||||||
{#each exportEntries as [exportKey, exportName]}
|
{#each exportEntries as [exportKey, exportName]}
|
||||||
{@const Icon = exportIcons[exportKey as keyof TargetInfo]}
|
{@const Icon = exportIcons[exportKey as keyof TargetInfo]}
|
||||||
<li class="flex items-center">
|
<li>
|
||||||
<Icon aria-hidden="true" class="text-primary mr-2 size-5" />
|
<div class="flex items-center">
|
||||||
{exportName}
|
<Icon aria-hidden="true" class="text-primary mr-2 size-5" />
|
||||||
|
{exportName}
|
||||||
|
</div>
|
||||||
|
{#if exportKey === "bin"}
|
||||||
|
<p class="text-body/80 mb-4 mt-3 text-sm">
|
||||||
|
This package provides a binary that can be executed after installation, or globally
|
||||||
|
via:
|
||||||
|
</p>
|
||||||
|
<Command command={xCommand} class="mb-6" />
|
||||||
|
{:else if exportKey === "scripts"}
|
||||||
|
<div class="text-body/80 mt-3 flex flex-wrap gap-2 text-sm">
|
||||||
|
{#each currentTarget?.scripts ?? [] as script}
|
||||||
|
<div class="bg-card text-heading w-max truncate rounded px-3 py-2" title={script}>
|
||||||
|
{script}
|
||||||
|
</div>
|
||||||
|
{/each}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
</li>
|
</li>
|
||||||
{/each}
|
{/each}
|
||||||
</ul>
|
</ul>
|
||||||
|
|
||||||
{#if currentTarget?.bin}
|
|
||||||
<p class="text-body/80 -mt-3 mb-4 text-sm">
|
|
||||||
This package provides a binary that can be executed after installation, or globally via:
|
|
||||||
</p>
|
|
||||||
<Command command={xCommand} class="mb-6" />
|
|
||||||
{/if}
|
|
||||||
|
|
||||||
{#if data.pkg.authors && data.pkg.authors.length > 0}
|
{#if data.pkg.authors && data.pkg.authors.length > 0}
|
||||||
<h2 class="text-heading mb-2 text-lg font-semibold">Authors</h2>
|
<h2 class="text-heading mb-2 text-lg font-semibold">Authors</h2>
|
||||||
<ul>
|
<ul>
|
||||||
|
|
|
@@ -2,7 +2,9 @@
 	const { data } = $props()
 </script>

-<div class="prose min-w-0 py-8 prose-pre:w-full prose-pre:overflow-auto">
+<div
+	class="prose prose-pre:w-full prose-pre:overflow-auto prose-img:inline-block prose-img:m-0 prose-video:inline-block prose-video:m-0 min-w-0 py-8"
+>
 	<!-- eslint-disable-next-line svelte/no-at-html-tags -->
 	{@html data.readmeHtml}
 </div>
@@ -1,6 +1,6 @@
-import { mdsvex } from "mdsvex"
+import adapter from "@sveltejs/adapter-cloudflare"
 import { vitePreprocess } from "@sveltejs/vite-plugin-svelte"
-import adapter from "@sveltejs/adapter-vercel"
+import { mdsvex } from "mdsvex"

 /** @type {import('@sveltejs/kit').Config} */
 const config = {
@@ -1,6 +1,89 @@
 import { sveltekit } from "@sveltejs/kit/vite"
-import { defineConfig } from "vite"
+import { readFile } from "node:fs/promises"
+import path from "node:path"
+import { defineConfig, type Plugin, type ResolvedConfig } from "vite"

 export default defineConfig({
-	plugins: [sveltekit()],
+	plugins: [sveltekit(), cloudflareWasmImport()],
 })
+
+// This plugin allows us to import WebAssembly modules and have them work in
+// both the browser, Node.js, and Cloudflare Workers.
+function cloudflareWasmImport(): Plugin {
+	const wasmPostfix = ".wasm"
+	const importMetaPrefix = "___WASM_IMPORT_PATH___"
+
+	let config: ResolvedConfig
+
+	return {
+		name: "cloudflare-wasm-import",
+		configResolved(resolvedConfig) {
+			config = resolvedConfig
+		},
+		async load(id) {
+			if (!id.endsWith(wasmPostfix)) return
+
+			if (config.command === "serve") {
+				// Running dev server
+
+				// We generate a module that on the browser will fetch the WASM file
+				// (through a Vite `?url` import), and on the server will read the file
+				// from the file system.
+
+				return `
+					import WASM_URL from ${JSON.stringify(`${id}?url`)}
+
+					let promise
+					export default function() {
+						if (import.meta.env.SSR) {
+							return promise ?? (promise = import("node:fs/promises")
+								.then(({ readFile }) => readFile(${JSON.stringify(id)})))
+						} else {
+							return promise ?? (promise = fetch(WASM_URL).then(r => r.arrayBuffer()))
+						}
+					}
+				`
+			}
+
+			// When building, we emit the WASM file as an asset and generate a module
+			// that will fetch the asset in the browser, import the WASM file when in
+			// a Cloudflare Worker, and read the file from the file system when in
+			// Node.js.
+
+			const wasmSource = await readFile(id)
+
+			const refId = this.emitFile({
+				type: "asset",
+				name: path.basename(id),
+				source: wasmSource,
+			})
+
+			return `
+				import WASM_URL from ${JSON.stringify(`${id}?url`)}
+
+				let promise
+				export default function() {
+					if (import.meta.env.SSR) {
+						if (typeof navigator !== "undefined" && navigator.userAgent === "Cloudflare-Workers") {
+							return promise ?? (promise = import(import.meta.${importMetaPrefix}${refId}))
+						} else {
+							return promise ?? (promise = import(\`\${"node:fs/promises"}\`)
+								.then(({ readFile }) => readFile(new URL(import.meta.ROLLUP_FILE_URL_${refId}))))
+						}
+					} else {
+						return promise ?? (promise = fetch(WASM_URL).then(r => r.arrayBuffer()))
+					}
+				}
+			`
+		},
+		resolveImportMeta(property, { chunkId }) {
+			if (!property?.startsWith(importMetaPrefix)) return
+
+			const refId = property.slice(importMetaPrefix.length)
+			const fileName = this.getFileName(refId)
+			const relativePath = path.relative(path.dirname(chunkId), fileName)
+
+			return JSON.stringify(relativePath)
+		},
+	}
+}
9
website/wrangler.toml
Normal file
@@ -0,0 +1,9 @@
name = "pesde"
compatibility_date = "2024-11-30"
pages_build_output_dir = ".svelte-kit/cloudflare"

[vars]
PUBLIC_REGISTRY_URL = "https://registry.pesde.daimond113.com/v0/"

[observability]
enabled = true