Compare commits

0.5..v0.5.0-rc.16

No commits in common. "0.5" and "v0.5.0-rc.16" have entirely different histories.

60 changed files with 989 additions and 1604 deletions

View file

@ -1,79 +0,0 @@
name: Debug

on:
  push:
  pull_request:

jobs:
  get-version:
    name: Get build version
    runs-on: ubuntu-latest
    outputs:
      version: v${{ steps.get_version.outputs.value }}+rev.g${{ steps.trim_sha.outputs.trimmed_sha }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Get package version
        uses: SebRollen/toml-action@v1.2.0
        id: get_version
        with:
          file: Cargo.toml
          field: package.version

      - name: Trim commit SHA
        id: trim_sha
        run: |
          commit_sha=${{ github.sha }}
          echo "trimmed_sha=${commit_sha:0:7}" | tee $GITHUB_OUTPUT

  build:
    strategy:
      matrix:
        include:
          - job-name: windows-x86_64
            target: x86_64-pc-windows-msvc
            runs-on: windows-latest
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-windows-x86_64

          - job-name: linux-x86_64
            target: x86_64-unknown-linux-gnu
            runs-on: ubuntu-latest
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-linux-x86_64

          - job-name: macos-x86_64
            target: x86_64-apple-darwin
            runs-on: macos-13
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-macos-x86_64

          - job-name: macos-aarch64
            target: aarch64-apple-darwin
            runs-on: macos-latest
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-macos-aarch64

    name: Build for ${{ matrix.job-name }}
    runs-on: ${{ matrix.runs-on }}
    needs: get-version
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Linux build dependencies
        if: ${{ matrix.runs-on == 'ubuntu-latest' }}
        run: |
          sudo apt-get update
          sudo apt-get install libdbus-1-dev pkg-config

      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable

      - name: Compile in debug mode
        run: cargo build --bins --no-default-features --features bin,patches,wally-compat --target ${{ matrix.target }} --locked

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.artifact-name }}
          if-no-files-found: error
          path: |
            target/${{ matrix.target }}/debug/pesde.exe
            target/${{ matrix.target }}/debug/pesde

View file

@ -4,44 +4,8 @@ on:
tags: tags:
- v* - v*
env: env:
CRATE_NAME: pesde
BIN_NAME: pesde BIN_NAME: pesde
jobs: jobs:
  prepare:
    name: Prepare
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.extract_version.outputs.VERSION }}
      found: ${{ steps.ensure_not_published.outputs.FOUND }}
    steps:
      - uses: actions/checkout@v4
      - name: Extract version
        id: extract_version
        shell: bash
        run: |
          VERSION=$(echo ${{ github.ref_name }} | cut -d'+' -f1 | cut -c 2-)
          echo "VERSION=$VERSION" >> "$GITHUB_OUTPUT"
      - name: Ensure not published
        id: ensure_not_published
        shell: bash
        env:
          VERSION: ${{ steps.extract_version.outputs.VERSION }}
        run: |
          CRATE_NAME="${{ env.CRATE_NAME }}"
          if [ ${#CRATE_NAME} -eq 1 ]; then
            DIR="1"
          elif [ ${#CRATE_NAME} -eq 2 ]; then
            DIR="2"
          elif [ ${#CRATE_NAME} -eq 3 ]; then
            DIR="3/${CRATE_NAME:0:1}"
          else
            DIR="${CRATE_NAME:0:2}/${CRATE_NAME:2:2}"
          fi
          FOUND=$(curl -sSL --fail-with-body "https://index.crates.io/$DIR/${{ env.CRATE_NAME }}" | jq -s 'any(.[]; .vers == "${{ env.VERSION }}")')
          echo "FOUND=$FOUND" >> "$GITHUB_OUTPUT"
build: build:
strategy: strategy:
matrix: matrix:
@ -67,17 +31,13 @@ jobs:
target: aarch64-apple-darwin target: aarch64-apple-darwin
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
name: Build for ${{ matrix.host }}-${{ matrix.arch }} name: Build for ${{ matrix.host }}-${{ matrix.arch }}
needs: [ prepare ]
if: ${{ needs.prepare.outputs.found == 'false' }}
env:
VERSION: ${{ needs.prepare.outputs.version }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable - uses: dtolnay/rust-toolchain@stable
- name: Set env - name: Set env
shell: bash shell: bash
run: | run: |
ARCHIVE_NAME=${{ env.BIN_NAME }}-${{ env.VERSION }}-${{ matrix.host }}-${{ matrix.arch }} ARCHIVE_NAME=${{ env.BIN_NAME }}-$(echo ${{ github.ref_name }} | cut -c 2-)-${{ matrix.host }}-${{ matrix.arch }}
echo "ARCHIVE_NAME=$ARCHIVE_NAME" >> $GITHUB_ENV echo "ARCHIVE_NAME=$ARCHIVE_NAME" >> $GITHUB_ENV
@ -131,9 +91,7 @@ jobs:
permissions: permissions:
contents: write contents: write
pull-requests: read pull-requests: read
needs: [ prepare, publish ] needs: [ build, publish ]
env:
VERSION: ${{ needs.prepare.outputs.version }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
@ -149,7 +107,7 @@ jobs:
with: with:
token: ${{ secrets.GITHUB_TOKEN }} token: ${{ secrets.GITHUB_TOKEN }}
tag_name: ${{ github.ref_name }} tag_name: ${{ github.ref_name }}
name: v${{ env.VERSION }} name: ${{ github.ref_name }}
draft: true draft: true
prerelease: ${{ startsWith(env.VERSION, '0') }} prerelease: ${{ startsWith(github.ref_name, 'v0') }}
files: artifacts/* files: artifacts/*

View file

@ -5,93 +5,150 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.5.3] - 2024-12-30 ## [0.5.0-rc.16] - 2024-12-08
### Added
- Add meta field in index files to preserve compatibility with potential future changes by @daimond113
### Changed
- Remove verbosity from release mode logging by @daimond113
## [0.5.2] - 2024-12-19
### Fixed ### Fixed
- Change dependency types for removed peer dependencies by @daimond113 - Do not require lib or bin exports if package exports scripts by @daimond113
- Resolve version to correct tag for `pesde_version` field by @daimond113
- Do not error on missing dependencies until full linking by @daimond113
### Changed ## [0.5.0-rc.15] - 2024-12-08
- Switch from `log` to `tracing` for logging by @daimond113 Release not available on GitHub and yanked from crates.io due to issues while publishing.
## [0.5.1] - 2024-12-15
### Fixed
- Ignore build metadata when comparing CLI versions by @daimond113
## [0.5.0] - 2024-12-14
### Added ### Added
- Add support for multiple targets under the same package name in workspace members by @daimond113 - Add improved CLI styling by @daimond113
- Add `yes` argument to skip all prompts in publish command by @daimond113 - Install pesde dependencies before Wally to support scripts packages by @daimond113
- Publish all workspace members when publishing a workspace by @daimond113 - Support packages exporting scripts by @daimond113
- Inform user about not finding any bin package when using its bin invocation by @daimond113 - Support using workspace root as a member by @daimond113
- Support full version requirements in workspace version field by @daimond113
- Improved authentication system for registry changes by @daimond113 ### Removed
- New website by @lukadev-0 - Remove special scripts repo handling to favour standard packages by @daimond113
- Add `--index` flag to `publish` command to publish to a specific index by @daimond113
- Support fallback Wally registries by @daimond113 ### Fixed
- Link dependencies before type extraction to support more use cases by @daimond113
- Strip `.luau` extension from linker modules' require paths to comply with Luau by @daimond113
- Correctly handle graph paths for resolving overriden packages by @daimond113
- Do not require `--` in bin package executables on Unix by @daimond113
## [0.5.0-rc.14] - 2024-11-30
### Fixed
- Fix `includes` not supporting root files by @daimond113
## [0.5.0-rc.13] - 2024-11-28
### Added
- Print that no updates are available in `outdated` command by @daimond113 - Print that no updates are available in `outdated` command by @daimond113
- Support negated globs in `workspace_members` field by @daimond113 - Support negated globs in `workspace_members` field by @daimond113
- Make `includes` use glob patterns by @daimond113 - Make `includes` use glob patterns by @daimond113
- Use symlinks for workspace dependencies to not require reinstalling by @daimond113 - Use symlinks for workspace dependencies to not require reinstalling by @daimond113
- Add `auth token` command to print the auth token for the index by @daimond113 - Add `auth token` command to print the auth token for the index by @daimond113
- Support specifying which external registries are allowed on registries by @daimond113 - Support specifying which external registries are allowed on registries by @daimond113
- Add improved CLI styling by @daimond113
- Install pesde dependencies before Wally to support scripts packages by @daimond113
- Support packages exporting scripts by @daimond113
- Support using workspace root as a member by @daimond113
- Allow multiple, user selectable scripts packages to be selected (& custom packages inputted) in `init` command by @daimond113
- Support granular control over which repositories are allowed in various specifier types by @daimond113
- Display included scripts in `publish` command by @daimond113
### Fixed ### Fixed
- Fix versions with dots not being handled correctly by @daimond113 - Install dependencies of packages in `x` command by @daimond113
- Use workspace specifiers' `target` field when resolving by @daimond113
- Add feature gates to `wally-compat` specific code in init command by @daimond113 ### Performance
- Remove duplicated manifest file name in `publish` command by @daimond113 - Asyncify dependency linking by @daimond113
- Allow use of Luau packages in `execute` command by @daimond113
- Fix `self-upgrade` overwriting its own binary by @daimond113 ## [0.5.0-rc.12] - 2024-11-22
- Correct `pesde.toml` inclusion message in `publish` command by @daimond113 ### Added
- Allow writes to files when `link` is false in PackageFS::write_to by @daimond113 - Support fallback Wally registries by @daimond113
- Handle missing revisions in AnyPackageIdentifier::from_str by @daimond113
- Make GitHub OAuth client ID config optional by @daimond113 ### Fixed
- Use updated aliases when reusing lockfile dependencies by @daimond113 - Fix peer dependencies being resolved incorrectly by @daimond113
- Listen for device flow completion without requiring pressing enter by @daimond113 - Set PESDE_ROOT to the correct path in `pesde run` by @daimond113
- Sync scripts repo in background by @daimond113
- Don't make CAS files read-only on Windows (file removal is disallowed if the file is read-only) by @daimond113 ## [0.5.0-rc.11] - 2024-11-20
- Validate package names are lowercase by @daimond113 ### Fixed
- Add back mistakenly removed updates check caching by @daimond113
- Set download error source to inner error to propagate the error by @daimond113
- Correctly copy workspace packages by @daimond113
## [0.5.0-rc.10] - 2024-11-16
### Fixed
- Fix `self-install` doing a cross-device move by @daimond113
### Changed
- Only store `pesde_version` executables in the version cache by @daimond113
## [0.5.0-rc.9] - 2024-11-16
### Fixed
- Correctly link Wally server packages by @daimond113
### Changed
- `self-upgrade` now will check for updates by itself by default by @daimond113
## [0.5.0-rc.8] - 2024-11-12
### Added
- Add `--index` flag to `publish` command to publish to a specific index by @daimond113
### Fixed
- Use a different algorithm for finding a CAS directory to avoid issues with mounted drives by @daimond113 - Use a different algorithm for finding a CAS directory to avoid issues with mounted drives by @daimond113
- Remove default.project.json from Git pesde dependencies by @daimond113 - Remove default.project.json from Git pesde dependencies by @daimond113
- Correctly (de)serialize workspace specifiers by @daimond113 - Correctly (de)serialize workspace specifiers by @daimond113
- Fix CAS finder algorithm issues with Windows by @daimond113 - Fix CAS finder algorithm issues with Windows by @daimond113
- Fix CAS finder algorithm's AlreadyExists error by @daimond113 - Fix CAS finder algorithm's AlreadyExists error by @daimond113
- Use moved path when setting file to read-only by @daimond113 - Use moved path when setting file to read-only by @daimond113
- Correctly link Wally server packages by @daimond113
- Fix `self-install` doing a cross-device move by @daimond113
- Add back mistakenly removed updates check caching by @daimond113
- Set download error source to inner error to propagate the error by @daimond113
- Correctly copy workspace packages by @daimond113
- Fix peer dependencies being resolved incorrectly by @daimond113
- Set PESDE_ROOT to the correct path in `pesde run` by @daimond113
- Install dependencies of packages in `x` command by @daimond113
- Fix `includes` not supporting root files by @daimond113
- Link dependencies before type extraction to support more use cases by @daimond113
- Strip `.luau` extension from linker modules' require paths to comply with Luau by @daimond113
- Correctly handle graph paths for resolving overriden packages by @daimond113
- Do not require `--` in bin package executables on Unix by @daimond113
- Do not require lib or bin exports if package exports scripts by @daimond113
- Correctly resolve URLs in `publish` command by @daimond113
- Add Roblox types in linker modules even with no config generator script by @daimond113
### Removed ### Changed
- Remove special scripts repo handling to favour standard packages by @daimond113 - Switched to fs-err for better errors with file system operations by @daimond113
- Use body bytes over multipart for publishing packages by @daimond113
### Performance
- Switch to async Rust by @daimond113
## [0.5.0-rc.7] - 2024-10-30
### Added
- New website by @lukadev-0
### Fixed
- Use updated aliases when reusing lockfile dependencies by @daimond113
- Listen for device flow completion without requiring pressing enter by @daimond113
- Sync scripts repo in background by @daimond113
- Don't make CAS files read-only on Windows (file removal is disallowed if the file is read-only) by @daimond113
- Validate package names are lowercase by @daimond113
### Performance
- Clone dependency repos shallowly by @daimond113
### Changed
- Optimize boolean expression in `publish` command by @daimond113
## [0.5.0-rc.6] - 2024-10-14
### Added
- Support full version requirements in workspace version field by @daimond113
- Improved authentication system for registry changes by @daimond113
### Fixed
- Correct `pesde.toml` inclusion message in `publish` command by @daimond113
- Allow writes to files when `link` is false in PackageFS::write_to by @daimond113
- Handle missing revisions in AnyPackageIdentifier::from_str by @daimond113
- Make GitHub OAuth client ID config optional by @daimond113
## [0.5.0-rc.5] - 2024-10-12
### Added
- Inform user about not finding any bin package when using its bin invocation by @daimond113
### Fixed
- Fix `self-upgrade` overwriting its own binary by @daimond113
- Allow use of Luau packages in `execute` command by @daimond113
- Remove duplicated manifest file name in `publish` command by @daimond113
## [0.5.0-rc.4] - 2024-10-12
### Added
- Add `yes` argument to skip all prompts in publish command by @daimond113
- Publish all workspace members when publishing a workspace by @daimond113
### Fixed
- Add feature gates to `wally-compat` specific code in init command by @daimond113
## [0.5.0-rc.3] - 2024-10-06
### Fixed
- Use workspace specifiers' `target` field when resolving by @daimond113
## [0.5.0-rc.2] - 2024-10-06
### Added
- Add support for multiple targets under the same package name in workspace members by @daimond113
### Fixed
- Fix versions with dots not being handled correctly by @daimond113
## [0.5.0-rc.1] - 2024-10-06
### Changed ### Changed
- Rewrite the entire project in a more maintainable way by @daimond113 - Rewrite the entire project in a more maintainable way by @daimond113
- Support workspaces by @daimond113 - Support workspaces by @daimond113
@ -99,20 +156,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Support multiple targets for a single package by @daimond113 - Support multiple targets for a single package by @daimond113
- Make registry much easier to self-host by @daimond113 - Make registry much easier to self-host by @daimond113
- Start maintaining a changelog by @daimond113 - Start maintaining a changelog by @daimond113
- Optimize boolean expression in `publish` command by @daimond113
- Switched to fs-err for better errors with file system operations by @daimond113
- Use body bytes over multipart for publishing packages by @daimond113
- `self-upgrade` now will check for updates by itself by default by @daimond113
- Only store `pesde_version` executables in the version cache by @daimond113
- Remove lower bound limit of 3 characters for pesde package names by @daimond113
### Performance [0.5.0-rc.16]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.15..v0.5.0-rc.16
- Clone dependency repos shallowly by @daimond113 [0.5.0-rc.15]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.14..v0.5.0-rc.15
- Switch to async Rust by @daimond113 [0.5.0-rc.14]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.13..v0.5.0-rc.14
- Asyncify dependency linking by @daimond113 [0.5.0-rc.13]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.12..v0.5.0-rc.13
- Use `exec` in Unix bin linking to reduce the number of processes by @daimond113 [0.5.0-rc.12]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.11..v0.5.0-rc.12
[0.5.0-rc.11]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.10..v0.5.0-rc.11
[0.5.3]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2 [0.5.0-rc.10]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.9..v0.5.0-rc.10
[0.5.2]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1 [0.5.0-rc.9]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.8..v0.5.0-rc.9
[0.5.1]: https://github.com/daimond113/pesde/compare/v0.5.0%2Bregistry.0.1.0..v0.5.1%2Bregistry.0.1.0 [0.5.0-rc.8]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.7..v0.5.0-rc.8
[0.5.0]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0%2Bregistry.0.1.0 [0.5.0-rc.7]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.6..v0.5.0-rc.7
[0.5.0-rc.6]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.5..v0.5.0-rc.6
[0.5.0-rc.5]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.4..v0.5.0-rc.5
[0.5.0-rc.4]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.3..v0.5.0-rc.4
[0.5.0-rc.3]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.2..v0.5.0-rc.3
[0.5.0-rc.2]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.1..v0.5.0-rc.2
[0.5.0-rc.1]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0-rc.1

Cargo.lock (generated, 354 changes)
View file

@ -36,9 +36,9 @@ dependencies = [
[[package]] [[package]]
name = "actix-governor" name = "actix-governor"
version = "0.8.0" version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a0cb8586d3fa368d00ef643e8ef77f5d3d5dfe5c7b333415a556bc12eb1c41a" checksum = "072a3d7907b945b0956f9721e01c117ad5765ce5be2fd9bb1e44a117c669de22"
dependencies = [ dependencies = [
"actix-http", "actix-http",
"actix-web", "actix-web",
@ -357,12 +357,6 @@ version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
[[package]]
name = "arrayvec"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]] [[package]]
name = "async-broadcast" name = "async-broadcast"
version = "0.7.1" version = "0.7.1"
@ -686,7 +680,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22" checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22"
dependencies = [ dependencies = [
"memchr", "memchr",
"regex-automata 0.4.9", "regex-automata",
"serde", "serde",
] ]
@ -763,9 +757,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]] [[package]]
name = "chrono" name = "chrono"
version = "0.4.39" version = "0.4.38"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401"
dependencies = [ dependencies = [
"android-tzdata", "android-tzdata",
"iana-time-zone", "iana-time-zone",
@ -1294,6 +1288,19 @@ dependencies = [
"syn 2.0.90", "syn 2.0.90",
] ]
[[package]]
name = "env_logger"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580"
dependencies = [
"humantime",
"is-terminal",
"log",
"regex",
"termcolor",
]
[[package]] [[package]]
name = "equivalent" name = "equivalent"
version = "1.0.1" version = "1.0.1"
@ -1689,7 +1696,7 @@ dependencies = [
"once_cell", "once_cell",
"regex", "regex",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1702,7 +1709,7 @@ dependencies = [
"gix-date", "gix-date",
"gix-utils", "gix-utils",
"itoa", "itoa",
"thiserror 2.0.7", "thiserror 2.0.5",
"winnow", "winnow",
] ]
@ -1719,7 +1726,7 @@ dependencies = [
"gix-trace", "gix-trace",
"kstring", "kstring",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
"unicode-bom", "unicode-bom",
] ]
@ -1729,7 +1736,7 @@ version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d48b897b4bbc881aea994b4a5bbb340a04979d7be9089791304e04a9fbc66b53" checksum = "d48b897b4bbc881aea994b4a5bbb340a04979d7be9089791304e04a9fbc66b53"
dependencies = [ dependencies = [
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1738,7 +1745,7 @@ version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6ffbeb3a5c0b8b84c3fe4133a6f8c82fa962f4caefe8d0762eced025d3eb4f7" checksum = "c6ffbeb3a5c0b8b84c3fe4133a6f8c82fa962f4caefe8d0762eced025d3eb4f7"
dependencies = [ dependencies = [
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1764,7 +1771,7 @@ dependencies = [
"gix-features", "gix-features",
"gix-hash", "gix-hash",
"memmap2", "memmap2",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1783,7 +1790,7 @@ dependencies = [
"memchr", "memchr",
"once_cell", "once_cell",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
"unicode-bom", "unicode-bom",
"winnow", "winnow",
] ]
@ -1798,7 +1805,7 @@ dependencies = [
"bstr", "bstr",
"gix-path", "gix-path",
"libc", "libc",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1815,7 +1822,7 @@ dependencies = [
"gix-sec", "gix-sec",
"gix-trace", "gix-trace",
"gix-url", "gix-url",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1827,7 +1834,7 @@ dependencies = [
"bstr", "bstr",
"itoa", "itoa",
"jiff", "jiff",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1839,7 +1846,7 @@ dependencies = [
"bstr", "bstr",
"gix-hash", "gix-hash",
"gix-object", "gix-object",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1855,7 +1862,7 @@ dependencies = [
"gix-path", "gix-path",
"gix-ref", "gix-ref",
"gix-sec", "gix-sec",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1876,7 +1883,7 @@ dependencies = [
"parking_lot", "parking_lot",
"prodash", "prodash",
"sha1_smol", "sha1_smol",
"thiserror 2.0.7", "thiserror 2.0.5",
"walkdir", "walkdir",
] ]
@ -1898,7 +1905,7 @@ dependencies = [
"gix-trace", "gix-trace",
"gix-utils", "gix-utils",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1931,7 +1938,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b5eccc17194ed0e67d49285e4853307e4147e95407f91c1c3e4a13ba9f4e4ce" checksum = "0b5eccc17194ed0e67d49285e4853307e4147e95407f91c1c3e4a13ba9f4e4ce"
dependencies = [ dependencies = [
"faster-hex", "faster-hex",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1983,7 +1990,7 @@ dependencies = [
"memmap2", "memmap2",
"rustix", "rustix",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -1994,7 +2001,7 @@ checksum = "1cd3ab68a452db63d9f3ebdacb10f30dba1fa0d31ac64f4203d395ed1102d940"
dependencies = [ dependencies = [
"gix-tempfile", "gix-tempfile",
"gix-utils", "gix-utils",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2010,7 +2017,7 @@ dependencies = [
"gix-object", "gix-object",
"gix-revwalk", "gix-revwalk",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2029,7 +2036,7 @@ dependencies = [
"gix-validate", "gix-validate",
"itoa", "itoa",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
"winnow", "winnow",
] ]
@ -2051,7 +2058,7 @@ dependencies = [
"gix-quote", "gix-quote",
"parking_lot", "parking_lot",
"tempfile", "tempfile",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2071,7 +2078,7 @@ dependencies = [
"memmap2", "memmap2",
"parking_lot", "parking_lot",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2083,7 +2090,7 @@ dependencies = [
"bstr", "bstr",
"faster-hex", "faster-hex",
"gix-trace", "gix-trace",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2095,7 +2102,7 @@ dependencies = [
"bstr", "bstr",
"faster-hex", "faster-hex",
"gix-trace", "gix-trace",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2108,7 +2115,7 @@ dependencies = [
"gix-trace", "gix-trace",
"home", "home",
"once_cell", "once_cell",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2123,7 +2130,7 @@ dependencies = [
"gix-config-value", "gix-config-value",
"gix-glob", "gix-glob",
"gix-path", "gix-path",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2136,7 +2143,7 @@ dependencies = [
"gix-config-value", "gix-config-value",
"parking_lot", "parking_lot",
"rustix", "rustix",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2153,7 +2160,7 @@ dependencies = [
"gix-transport", "gix-transport",
"gix-utils", "gix-utils",
"maybe-async", "maybe-async",
"thiserror 2.0.7", "thiserror 2.0.5",
"winnow", "winnow",
] ]
@ -2165,7 +2172,7 @@ checksum = "64a1e282216ec2ab2816cd57e6ed88f8009e634aec47562883c05ac8a7009a63"
dependencies = [ dependencies = [
"bstr", "bstr",
"gix-utils", "gix-utils",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2185,7 +2192,7 @@ dependencies = [
"gix-utils", "gix-utils",
"gix-validate", "gix-validate",
"memmap2", "memmap2",
"thiserror 2.0.7", "thiserror 2.0.5",
"winnow", "winnow",
] ]
@ -2200,7 +2207,7 @@ dependencies = [
"gix-revision", "gix-revision",
"gix-validate", "gix-validate",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2218,7 +2225,7 @@ dependencies = [
"gix-object", "gix-object",
"gix-revwalk", "gix-revwalk",
"gix-trace", "gix-trace",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2233,7 +2240,7 @@ dependencies = [
"gix-hashtable", "gix-hashtable",
"gix-object", "gix-object",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2260,7 +2267,7 @@ dependencies = [
"gix-pathspec", "gix-pathspec",
"gix-refspec", "gix-refspec",
"gix-url", "gix-url",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2298,7 +2305,7 @@ dependencies = [
"gix-sec", "gix-sec",
"gix-url", "gix-url",
"reqwest", "reqwest",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2315,7 +2322,7 @@ dependencies = [
"gix-object", "gix-object",
"gix-revwalk", "gix-revwalk",
"smallvec", "smallvec",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2327,7 +2334,7 @@ dependencies = [
"bstr", "bstr",
"gix-features", "gix-features",
"gix-path", "gix-path",
"thiserror 2.0.7", "thiserror 2.0.5",
"url", "url",
] ]
@ -2348,7 +2355,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd520d09f9f585b34b32aba1d0b36ada89ab7fefb54a8ca3fe37fc482a750937" checksum = "cd520d09f9f585b34b32aba1d0b36ada89ab7fefb54a8ca3fe37fc482a750937"
dependencies = [ dependencies = [
"bstr", "bstr",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
@ -2387,14 +2394,14 @@ dependencies = [
"gix-path", "gix-path",
"gix-worktree", "gix-worktree",
"io-close", "io-close",
"thiserror 2.0.7", "thiserror 2.0.5",
] ]
[[package]] [[package]]
name = "governor" name = "governor"
version = "0.8.0" version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "842dc78579ce01e6a1576ad896edc92fca002dd60c9c3746b7fc2bec6fb429d0" checksum = "0746aa765db78b521451ef74221663b57ba595bf83f75d0ce23cc09447c8139f"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"dashmap", "dashmap",
@ -2601,6 +2608,12 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "humantime"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]] [[package]]
name = "hyper" name = "hyper"
version = "1.5.1" version = "1.5.1"
@ -2880,10 +2893,19 @@ dependencies = [
"number_prefix", "number_prefix",
"portable-atomic", "portable-atomic",
"unicode-width 0.2.0", "unicode-width 0.2.0",
"vt100",
"web-time", "web-time",
] ]
[[package]]
name = "indicatif-log-bridge"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63703cf9069b85dbe6fe26e1c5230d013dee99d3559cd3d02ba39e099ef7ab02"
dependencies = [
"indicatif",
"log",
]
[[package]] [[package]]
name = "inout" name = "inout"
version = "0.1.3" version = "0.1.3"
@ -2948,6 +2970,17 @@ dependencies = [
"once_cell", "once_cell",
] ]
[[package]]
name = "is-terminal"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b"
dependencies = [
"hermit-abi 0.4.0",
"libc",
"windows-sys 0.52.0",
]
[[package]] [[package]]
name = "is-wsl" name = "is-wsl"
version = "0.4.0" version = "0.4.0"
@ -3207,15 +3240,6 @@ version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5" checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5"
[[package]]
name = "matchers"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
dependencies = [
"regex-automata 0.1.10",
]
[[package]] [[package]]
name = "maybe-async" name = "maybe-async"
version = "0.2.10" version = "0.2.10"
@ -3322,12 +3346,6 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2195bf6aa996a481483b29d62a7663eed3fe39600c460e323f8ff41e90bdd89b" checksum = "2195bf6aa996a481483b29d62a7663eed3fe39600c460e323f8ff41e90bdd89b"
[[package]]
name = "mutually_exclusive_features"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e94e1e6445d314f972ff7395df2de295fe51b71821694f0b0e1e79c4f12c8577"
[[package]] [[package]]
name = "native-tls" name = "native-tls"
version = "0.2.12" version = "0.2.12"
@ -3389,16 +3407,6 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21"
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
dependencies = [
"overload",
"winapi",
]
[[package]] [[package]]
name = "num" name = "num"
version = "0.4.3" version = "0.4.3"
@ -3598,12 +3606,6 @@ dependencies = [
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]] [[package]]
name = "ownedbytes" name = "ownedbytes"
version = "0.7.0" version = "0.7.0"
@ -3662,7 +3664,7 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]] [[package]]
name = "pesde" name = "pesde"
version = "0.5.3" version = "0.5.0-rc.16"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-compression", "async-compression",
@ -3678,10 +3680,13 @@ dependencies = [
"git2", "git2",
"gix", "gix",
"indicatif", "indicatif",
"indicatif-log-bridge",
"inquire", "inquire",
"keyring", "keyring",
"log",
"open", "open",
"pathdiff", "pathdiff",
"pretty_env_logger",
"relative-path", "relative-path",
"reqwest", "reqwest",
"semver", "semver",
@ -3690,15 +3695,12 @@ dependencies = [
"serde_with", "serde_with",
"sha2", "sha2",
"tempfile", "tempfile",
"thiserror 2.0.7", "thiserror 2.0.5",
"tokio", "tokio",
"tokio-tar", "tokio-tar",
"tokio-util", "tokio-util",
"toml", "toml",
"toml_edit", "toml_edit",
"tracing",
"tracing-indicatif",
"tracing-subscriber",
"url", "url",
"wax", "wax",
"winreg", "winreg",
@ -3706,7 +3708,7 @@ dependencies = [
[[package]] [[package]]
name = "pesde-registry" name = "pesde-registry"
version = "0.1.2" version = "0.7.0"
dependencies = [ dependencies = [
"actix-cors", "actix-cors",
"actix-governor", "actix-governor",
@ -3721,7 +3723,9 @@ dependencies = [
"futures", "futures",
"git2", "git2",
"gix", "gix",
"log",
"pesde", "pesde",
"pretty_env_logger",
"reqwest", "reqwest",
"rusty-s3", "rusty-s3",
"semver", "semver",
@ -3733,13 +3737,11 @@ dependencies = [
"sha2", "sha2",
"tantivy", "tantivy",
"tempfile", "tempfile",
"thiserror 2.0.7", "thiserror 2.0.5",
"tokio", "tokio",
"tokio-tar", "tokio-tar",
"toml", "toml",
"tracing", "url",
"tracing-actix-web",
"tracing-subscriber",
] ]
[[package]] [[package]]
@ -3836,6 +3838,16 @@ dependencies = [
"zerocopy", "zerocopy",
] ]
[[package]]
name = "pretty_env_logger"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "865724d4dbe39d9f3dd3b52b88d859d66bcb2d6a0acfd5ea68a65fb66d4bdc1c"
dependencies = [
"env_logger",
"log",
]
[[package]] [[package]]
name = "proc-macro-crate" name = "proc-macro-crate"
version = "3.2.0" version = "3.2.0"
@ -3902,7 +3914,7 @@ dependencies = [
"rustc-hash 2.1.0", "rustc-hash 2.1.0",
"rustls", "rustls",
"socket2", "socket2",
"thiserror 2.0.7", "thiserror 2.0.5",
"tokio", "tokio",
"tracing", "tracing",
] ]
@ -3921,7 +3933,7 @@ dependencies = [
"rustls", "rustls",
"rustls-pki-types", "rustls-pki-types",
"slab", "slab",
"thiserror 2.0.7", "thiserror 2.0.5",
"tinyvec", "tinyvec",
"tracing", "tracing",
"web-time", "web-time",
@ -4056,17 +4068,8 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"memchr", "memchr",
"regex-automata 0.4.9", "regex-automata",
"regex-syntax 0.8.5", "regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
dependencies = [
"regex-syntax 0.6.29",
] ]
[[package]] [[package]]
@ -4077,7 +4080,7 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"memchr", "memchr",
"regex-syntax 0.8.5", "regex-syntax",
] ]
[[package]] [[package]]
@ -4086,12 +4089,6 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a"
[[package]]
name = "regex-syntax"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]] [[package]]
name = "regex-syntax" name = "regex-syntax"
version = "0.8.5" version = "0.8.5"
@ -4371,9 +4368,9 @@ dependencies = [
[[package]] [[package]]
name = "semver" name = "semver"
version = "1.0.24" version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
dependencies = [ dependencies = [
"serde", "serde",
] ]
@ -4391,6 +4388,7 @@ dependencies = [
"sentry-contexts", "sentry-contexts",
"sentry-core", "sentry-core",
"sentry-debug-images", "sentry-debug-images",
"sentry-log",
"sentry-panic", "sentry-panic",
"sentry-tracing", "sentry-tracing",
"tokio", "tokio",
@ -4459,6 +4457,16 @@ dependencies = [
"sentry-core", "sentry-core",
] ]
[[package]]
name = "sentry-log"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "efcbfbb74628eaef033c1154d4bb082437c7592ce2282c7c5ccb455c4c97a06d"
dependencies = [
"log",
"sentry-core",
]
[[package]] [[package]]
name = "sentry-panic" name = "sentry-panic"
version = "0.35.0" version = "0.35.0"
@ -4500,18 +4508,18 @@ dependencies = [
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.216" version = "1.0.215"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.216" version = "1.0.215"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -4633,15 +4641,6 @@ dependencies = [
"digest", "digest",
] ]
[[package]]
name = "sharded-slab"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
dependencies = [
"lazy_static",
]
[[package]] [[package]]
name = "shell-words" name = "shell-words"
version = "1.1.0" version = "1.1.0"
@ -4926,7 +4925,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d60769b80ad7953d8a7b2c70cdfe722bbcdcac6bccc8ac934c40c034d866fc18" checksum = "d60769b80ad7953d8a7b2c70cdfe722bbcdcac6bccc8ac934c40c034d866fc18"
dependencies = [ dependencies = [
"byteorder", "byteorder",
"regex-syntax 0.8.5", "regex-syntax",
"utf8-ranges", "utf8-ranges",
] ]
@ -4984,6 +4983,15 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "termcolor"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
dependencies = [
"winapi-util",
]
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "1.0.69" version = "1.0.69"
@ -4995,11 +5003,11 @@ dependencies = [
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "2.0.7" version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93605438cbd668185516ab499d589afb7ee1859ea3d5fc8f6b0755e1c7443767" checksum = "643caef17e3128658ff44d85923ef2d28af81bb71e0d67bbfe1d76f19a73e053"
dependencies = [ dependencies = [
"thiserror-impl 2.0.7", "thiserror-impl 2.0.5",
] ]
[[package]] [[package]]
@ -5015,9 +5023,9 @@ dependencies = [
[[package]] [[package]]
name = "thiserror-impl" name = "thiserror-impl"
version = "2.0.7" version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1d8749b4531af2117677a5fcd12b1348a3fe2b81e36e61ffeac5c4aa3273e36" checksum = "995d0bbc9995d1f19d28b7215a9352b0fc3cd3a2d2ec95c2cadc485cdedbcdde"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -5231,19 +5239,6 @@ dependencies = [
"tracing-core", "tracing-core",
] ]
[[package]]
name = "tracing-actix-web"
version = "0.7.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54a9f5c1aca50ebebf074ee665b9f99f2e84906dcf6b993a0d0090edb835166d"
dependencies = [
"actix-web",
"mutually_exclusive_features",
"pin-project",
"tracing",
"uuid",
]
[[package]] [[package]]
name = "tracing-attributes" name = "tracing-attributes"
version = "0.1.28" version = "0.1.28"
@ -5265,45 +5260,13 @@ dependencies = [
"valuable", "valuable",
] ]
[[package]]
name = "tracing-indicatif"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74ba258e9de86447f75edf6455fded8e5242704c6fccffe7bf8d7fb6daef1180"
dependencies = [
"indicatif",
"tracing",
"tracing-core",
"tracing-subscriber",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
dependencies = [
"log",
"once_cell",
"tracing-core",
]
[[package]] [[package]]
name = "tracing-subscriber" name = "tracing-subscriber"
version = "0.3.19" version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
dependencies = [ dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core", "tracing-core",
"tracing-log",
] ]
[[package]] [[package]]
@ -5474,39 +5437,6 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "vt100"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84cd863bf0db7e392ba3bd04994be3473491b31e66340672af5d11943c6274de"
dependencies = [
"itoa",
"log",
"unicode-width 0.1.14",
"vte",
]
[[package]]
name = "vte"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197"
dependencies = [
"arrayvec",
"utf8parse",
"vte_generate_state_changes",
]
[[package]]
name = "vte_generate_state_changes"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e369bee1b05d510a7b4ed645f5faa90619e05437111783ea5848f28d97d3c2e"
dependencies = [
"proc-macro2",
"quote",
]
[[package]] [[package]]
name = "walkdir" name = "walkdir"
version = "2.5.0" version = "2.5.0"

View file

@ -1,6 +1,6 @@
[package] [package]
name = "pesde" name = "pesde"
version = "0.5.3" version = "0.5.0-rc.16"
edition = "2021" edition = "2021"
license = "MIT" license = "MIT"
authors = ["daimond113 <contact@daimond113.com>"] authors = ["daimond113 <contact@daimond113.com>"]
@ -13,10 +13,10 @@ include = ["src/**/*", "Cargo.toml", "Cargo.lock", "README.md", "LICENSE", "CHAN
bin = [ bin = [
"dep:clap", "dep:clap",
"dep:dirs", "dep:dirs",
"dep:tracing-subscriber", "dep:pretty_env_logger",
"reqwest/json", "reqwest/json",
"dep:indicatif", "dep:indicatif",
"dep:tracing-indicatif", "dep:indicatif-log-bridge",
"dep:inquire", "dep:inquire",
"dep:toml_edit", "dep:toml_edit",
"dep:colored", "dep:colored",
@ -44,25 +44,25 @@ required-features = ["bin"]
uninlined_format_args = "warn" uninlined_format_args = "warn"
[dependencies] [dependencies]
serde = { version = "1.0.216", features = ["derive"] } serde = { version = "1.0.215", features = ["derive"] }
toml = "0.8.19" toml = "0.8.19"
serde_with = "3.11.0" serde_with = "3.11.0"
gix = { version = "0.68.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] } gix = { version = "0.68.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
semver = { version = "1.0.24", features = ["serde"] } semver = { version = "1.0.23", features = ["serde"] }
reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls"] } reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls"] }
tokio-tar = "0.3.1" tokio-tar = "0.3.1"
async-compression = { version = "0.4.18", features = ["tokio", "gzip"] } async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
pathdiff = "0.2.3" pathdiff = "0.2.3"
relative-path = { version = "1.9.3", features = ["serde"] } relative-path = { version = "1.9.3", features = ["serde"] }
tracing = { version = "0.1.41", features = ["attributes"] } log = "0.4.22"
thiserror = "2.0.7" thiserror = "2.0.5"
tokio = { version = "1.42.0", features = ["process"] } tokio = { version = "1.42.0", features = ["process"] }
tokio-util = "0.7.13" tokio-util = "0.7.13"
async-stream = "0.3.6" async-stream = "0.3.6"
futures = "0.3.31" futures = "0.3.31"
full_moon = { version = "1.1.2", features = ["luau"] } full_moon = { version = "1.1.2", features = ["luau"] }
url = { version = "2.5.4", features = ["serde"] } url = { version = "2.5.4", features = ["serde"] }
chrono = { version = "0.4.39", features = ["serde"] } chrono = { version = "0.4.38", features = ["serde"] }
sha2 = "0.10.8" sha2 = "0.10.8"
tempfile = "3.14.0" tempfile = "3.14.0"
wax = { version = "0.6.0", default-features = false } wax = { version = "0.6.0", default-features = false }
@ -81,9 +81,9 @@ colored = { version = "2.1.0", optional = true }
toml_edit = { version = "0.22.22", optional = true } toml_edit = { version = "0.22.22", optional = true }
clap = { version = "4.5.23", features = ["derive"], optional = true } clap = { version = "4.5.23", features = ["derive"], optional = true }
dirs = { version = "5.0.1", optional = true } dirs = { version = "5.0.1", optional = true }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true } pretty_env_logger = { version = "0.5.0", optional = true }
indicatif = { version = "0.17.9", optional = true } indicatif = { version = "0.17.9", optional = true }
tracing-indicatif = { version = "0.3.8", optional = true } indicatif-log-bridge = { version = "0.2.3", optional = true }
inquire = { version = "0.7.5", optional = true } inquire = { version = "0.7.5", optional = true }
[target.'cfg(target_os = "windows")'.dependencies] [target.'cfg(target_os = "windows")'.dependencies]

View file

@ -1,25 +0,0 @@
# Security Policy
## Supported Versions
As pesde is currently in version 0.x, we can only guarantee security for:
- **The latest minor** (currently 0.5).
- **The latest release candidate for the next version**, if available.
When a new minor version is released, the previous version will immediately lose security support.
> **Note:** This policy will change with the release of version 1.0, which will include an extended support period for versions >=1.0.
| Version | Supported |
| ------- | ------------------ |
| 0.5.x | :white_check_mark: |
| < 0.5 | :x: |
## Reporting a Vulnerability
We encourage all security concerns to be reported at [pesde@daimond113.com](mailto:pesde@daimond113.com), using the following format:
- **Subject**: The subject must be prefixed with `[SECURITY]` to ensure it is prioritized as a security concern.
- **Content**:
- **Affected Versions**: Clearly specify which versions are affected by the issue.
- **Issue Details**: Provide a detailed description of the issue, including reproduction steps and/or a simple example, if applicable.
We will try to respond as soon as possible.

View file

@ -38,17 +38,17 @@ Git dependencies are dependencies on packages hosted on a Git repository.
```toml title="pesde.toml" ```toml title="pesde.toml"
[dependencies] [dependencies]
acme = { repo = "acme/package", rev = "aeff6" } acme = { repo = "acme/package", rev = "main" }
``` ```
In this example, we're specifying a dependency on the package contained within In this example, we're specifying a dependency on the package contained within
the `acme/package` GitHub repository at the `aeff6` commit. the `acme/package` GitHub repository at the `main` branch.
You can also use a URL to specify the Git repository and a tag for the revision. You can also use a URL to specify the Git repository and a specific commit.
```toml title="pesde.toml" ```toml title="pesde.toml"
[dependencies] [dependencies]
acme = { repo = "https://git.acme.local/package.git", rev = "v0.1.0" } acme = { repo = "https://git.acme.local/package.git", rev = "aeff6" }
``` ```
You can also specify a path if the package is not at the root of the repository. You can also specify a path if the package is not at the root of the repository.
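For illustration, a dependency on a package kept in a sub-directory of the repository might look like the sketch below. The `path` key name is an assumption made for this example, so check the dependency reference for the exact field.

```toml title="pesde.toml"
[dependencies]
# hypothetical: the package lives in packages/acme inside the repository
acme = { repo = "acme/package", rev = "main", path = "packages/acme" }
```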

View file

@ -20,15 +20,15 @@ to get it added.
Studio. Studio.
Running `pesde init` will prompt you to select a target, select Running `pesde init` will prompt you to select a target, select
`roblox` or `roblox_server` in this case. You will be prompted to pick out a `roblox` or `roblox_server` in this case. This will setup the configuration
scripts package. Select `pesde/scripts_rojo` to get started with Rojo. needed to use pesde in a project using Rojo.
## Usage with other tools ## Usage with other tools
If you are using a different sync tool, you should look for it's scripts If you are using a different sync tool, you should look for it's scripts in the
package on the registry. If you cannot find it, you can write your own and pesde-scripts repository. If you cannot find them, you can write your own and
optionally submit a PR to pesde-scripts to help others using the same tool as optionally submit a PR to help others using the same tool as you get started
you get started quicker. quicker.
Scaffold your project with `pesde init`, select the `roblox` or `roblox_server` Scaffold your project with `pesde init`, select the `roblox` or `roblox_server`
target, and then create a `.pesde/roblox_sync_config_generator.luau` script target, and then create a `.pesde/roblox_sync_config_generator.luau` script

View file

@ -1,53 +0,0 @@
---
title: Using Scripts Packages
description: Learn how to use scripts packages.
---
A **scripts package** is a package that contains scripts. The scripts provided
by the package are linked in `.pesde/{alias}/{script_name}.luau` of the project
that uses the package.
## Using a scripts package
Scripts packages can be installed using the `pesde add` and `pesde install`
commands.
This requires a `pesde.toml` file to be present in the current directory, and
will add the scripts package to the `dependencies` section of the file.
```sh
pesde add pesde/scripts_rojo
pesde install
```
This will add the scripts package to your project, and installing will put the
scripts at `.pesde/scripts_rojo/{script_name}.luau`. You can then add the scripts
to your manifest, for example:
```toml title="pesde.toml"
[scripts]
roblox_sync_config_generator = ".pesde/scripts_rojo/roblox_sync_config_generator.luau"
```
## Making a scripts package
To make a scripts package you must use a target compatible with scripts exports.
These currently are `lune` and `luau`.
Here is an example of a scripts package:
```toml title="pesde.toml"
name = "pesde/scripts_rojo"
version = "1.0.0"
license = "MIT"
[target]
environment = "lune"
[target.scripts]
roblox_sync_config_generator = "roblox_sync_config_generator.luau"
```
The `scripts` table in the target is a map of script names to the path of the
script in the package. The scripts will be linked in the project that uses the
package at `.pesde/{alias}/{script_name}.luau`.

View file

@ -19,10 +19,10 @@ To create an index, create a new repository and add a `config.toml` file with
the following content: the following content:
```toml title="config.toml" ```toml title="config.toml"
# the URL of the registry API # The URL of the registry API
api = "https://registry.acme.local/" api = "https://registry.acme.local/"
# package download URL (optional) # Package download URL (optional)
download = "{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}" download = "{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}"
# the client ID of the GitHub OAuth app (optional) # the client ID of the GitHub OAuth app (optional)
@ -33,16 +33,13 @@ git_allowed = true
# whether to allow packages which depend on packages from other registries # whether to allow packages which depend on packages from other registries
# (default: false) # (default: false)
other_registries_allowed = ["https://git.acme.local/index"] other_registries_allowed = true
# whether to allow packages with Wally dependencies (default: false) # whether to allow packages with Wally dependencies (default: false)
wally_allowed = false wally_allowed = false
# the maximum size of the archive in bytes (default: 4MB) # the maximum size of the archive in bytes (default: 4MB)
max_archive_size = 4194304 max_archive_size = 4194304
# the scripts packages present in the `init` command selection by default
scripts_packages = ["pesde/scripts_rojo"]
``` ```
- **api**: The URL of the registry API. See below for more information. - **api**: The URL of the registry API. See below for more information.
@ -63,24 +60,18 @@ scripts_packages = ["pesde/scripts_rojo"]
- **github_oauth_client_id**: This is required if you use GitHub OAuth for - **github_oauth_client_id**: This is required if you use GitHub OAuth for
authentication. See below for more information. authentication. See below for more information.
- **git_allowed**: Whether to allow packages with Git dependencies. This can be - **git_allowed**: Whether to allow packages with Git dependencies. This is
either a bool or a list of allowed repository URLs. This is optional and optional and defaults to `false`.
defaults to `false`.
- **other_registries_allowed**: Whether to allow packages which depend on - **other_registries_allowed**: Whether to allow packages which depend on
packages from other registries. This can be either a bool or a list of packages from other registries. This is optional and defaults to `false`.
allowed index repository URLs. This is optional and defaults to `false`.
- **wally_allowed**: Whether to allow packages with Wally dependencies. This can - **wally_allowed**: Whether to allow packages with Wally dependencies. This is
be either a bool or a list of allowed index repository URLs. This is
optional and defaults to `false`. optional and defaults to `false`.
- **max_archive_size**: The maximum size of the archive in bytes. This is - **max_archive_size**: The maximum size of the archive in bytes. This is
optional and defaults to `4194304` (4MB). optional and defaults to `4194304` (4MB).
- **scripts_packages**: The scripts packages present in the `init` command
selection by default. This is optional and defaults to none.
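Per the 0.5 option descriptions above, `git_allowed`, `other_registries_allowed`, and `wally_allowed` may also be given as a list of allowed URLs instead of a bool. A minimal sketch with placeholder URLs:

```toml title="config.toml"
# allow Git dependencies only on these repositories
git_allowed = ["https://git.acme.local/package.git"]

# allow dependencies on packages from these other registries only
other_registries_allowed = ["https://git.acme.local/index"]

# allow Wally dependencies only from these indices
wally_allowed = ["https://git.acme.local/wally-index"]
```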
You should then push this repository to [GitHub](https://github.com/). You should then push this repository to [GitHub](https://github.com/).
## Configuring the registry ## Configuring the registry
@ -97,8 +88,8 @@ has access to the index repository. We recommend using a separate account
for this purpose. for this purpose.
<Aside> <Aside>
For a GitHub account the password **must** be a personal access token. For instructions on how to For a GitHub account the password **must** be a personal access token. For
create a personal access token, see the [GitHub instructions on how to create a personal access token, see the [GitHub
documentation](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens). documentation](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens).
The access token must have read and write access to the index repository. The access token must have read and write access to the index repository.
</Aside> </Aside>

View file

@ -41,16 +41,6 @@ You can follow the installation instructions in the
pesde should now be installed on your system. You may need to restart your pesde should now be installed on your system. You may need to restart your
computer for the changes to take effect. computer for the changes to take effect.
<Aside type="caution">
pesde uses symlinks which are an administrator-level operation on Windows.
To ensure proper functionality, enable [Developer Mode](https://learn.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development).
If you are getting errors such as `Failed to symlink file, a required
privilege is not held by the client`, then enabling this setting will fix
them.
</Aside>
</TabItem> </TabItem>
<TabItem label="Linux & macOS"> <TabItem label="Linux & macOS">
@ -69,7 +59,7 @@ You can follow the installation instructions in the
environment variable. environment variable.
```sh title=".zshrc" ```sh title=".zshrc"
export PATH="$PATH:$HOME/.pesde/bin" export PATH = "$PATH:/home/user/.pesde/bin"
``` ```
You should then be able to run `pesde` after restarting your shell. You should then be able to run `pesde` after restarting your shell.

View file

@ -155,19 +155,6 @@ build_files = [
These files are passed to [`roblox_sync_config_generator`](#roblox_sync_config_generator) These files are passed to [`roblox_sync_config_generator`](#roblox_sync_config_generator)
when the package is installed in order to generate the necessary configuration. when the package is installed in order to generate the necessary configuration.
### `scripts`
**Allowed in:** `luau`, `lune`
A list of scripts that will be linked to the dependant's `.pesde` directory, and
copied over to the [scripts](#scripts-1) section when initialising a project with
this package as the scripts package.
```toml
[target.scripts]
roblox_sync_config_generator = "scripts/roblox_sync_config_generator.luau"
```
## `[scripts]` ## `[scripts]`
The `[scripts]` section contains scripts that can be run using the `pesde run` The `[scripts]` section contains scripts that can be run using the `pesde run`
@ -190,6 +177,10 @@ sync tools.
of files specified within the [`target.build_files`](#build_files) of the of files specified within the [`target.build_files`](#build_files) of the
package. package.
You can find template scripts inside the
[`pesde-scripts` repository](https://github.com/pesde-pkg/scripts)
for various sync tools.
<LinkCard <LinkCard
title="Roblox" title="Roblox"
description="Learn more about using pesde in Roblox projects." description="Learn more about using pesde in Roblox projects."
@ -369,14 +360,14 @@ foo = { wally = "acme/foo", version = "1.2.3", index = "acme" }
```toml ```toml
[dependencies] [dependencies]
foo = { repo = "acme/packages", rev = "aeff6", path = "foo" } foo = { repo = "acme/packages", rev = "main", path = "foo" }
``` ```
**Git dependencies** contain the following fields: **Git dependencies** contain the following fields:
- `repo`: The URL of the Git repository. - `repo`: The URL of the Git repository.
This can either be `<owner>/<name>` for a GitHub repository, or a full URL. This can either be `<owner>/<name>` for a GitHub repository, or a full URL.
- `rev`: The Git revision to install. This can be a tag or commit hash. - `rev`: The Git revision to install. This can be a branch, tag, or commit hash.
- `path`: The path within the repository to install. If not specified, the root - `path`: The path within the repository to install. If not specified, the root
of the repository is used. of the repository is used.
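As a hedged illustration of the variants described above (the repository URL, tag, and path are placeholders):

```toml
[dependencies]
# full-URL form, pinned to a tag, installing a subdirectory of the repository
bar = { repo = "https://git.acme.local/acme/packages.git", rev = "v1.2.3", path = "bar" }
```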

View file

@ -1,22 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.1.2]
### Changed
- Update to pesde lib API changes by @daimond113
## [0.1.1] - 2024-12-19
### Changed
- Switch to tracing for logging by @daimond113
## [0.1.0] - 2024-12-14
### Added
- Rewrite registry for pesde v0.5.0 by @daimond113
[0.1.2]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
[0.1.1]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
[0.1.0]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0%2Bregistry.0.1.0

View file

@ -1,6 +1,6 @@
[package] [package]
name = "pesde-registry" name = "pesde-registry"
version = "0.1.2" version = "0.7.0"
edition = "2021" edition = "2021"
repository = "https://github.com/pesde-pkg/index" repository = "https://github.com/pesde-pkg/index"
publish = false publish = false
@ -8,12 +8,13 @@ publish = false
[dependencies] [dependencies]
actix-web = "4.9.0" actix-web = "4.9.0"
actix-cors = "0.7.0" actix-cors = "0.7.0"
actix-governor = "0.8.0" actix-governor = "0.7.0"
dotenvy = "0.15.7" dotenvy = "0.15.7"
thiserror = "2.0.7" thiserror = "2.0.5"
tantivy = "0.22.0" tantivy = "0.22.0"
semver = "1.0.24" semver = "1.0.23"
chrono = { version = "0.4.39", features = ["serde"] } chrono = { version = "0.4.38", features = ["serde"] }
url = "2.5.4"
futures = "0.3.31" futures = "0.3.31"
tokio = "1.42.0" tokio = "1.42.0"
tempfile = "3.14.0" tempfile = "3.14.0"
@ -26,7 +27,7 @@ gix = { version = "0.68.0", default-features = false, features = [
"credentials", "credentials",
] } ] }
serde = "1.0.216" serde = "1.0.215"
serde_json = "1.0.133" serde_json = "1.0.133"
serde_yaml = "0.9.34" serde_yaml = "0.9.34"
toml = "0.8.19" toml = "0.8.19"
@ -40,11 +41,10 @@ constant_time_eq = "0.3.1"
tokio-tar = "0.3.1" tokio-tar = "0.3.1"
async-compression = { version = "0.4.18", features = ["tokio", "gzip"] } async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
tracing = { version = "0.1.41", features = ["attributes"] } log = "0.4.22"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } pretty_env_logger = "0.5.0"
tracing-actix-web = "0.7.15"
sentry = { version = "0.35.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "tracing"] } sentry = { version = "0.35.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "log"] }
sentry-actix = "0.35.0" sentry-actix = "0.35.0"
pesde = { path = "..", features = ["wally-compat"] } pesde = { path = "..", features = ["wally-compat"] }

View file

@ -45,7 +45,7 @@ impl AuthImpl for GitHubAuth {
return Ok(None); return Ok(None);
} }
Err(_) => { Err(_) => {
tracing::error!( log::error!(
"failed to get user: {}", "failed to get user: {}",
response.into_error().await.unwrap_err() response.into_error().await.unwrap_err()
); );
@ -53,7 +53,7 @@ impl AuthImpl for GitHubAuth {
} }
}, },
Err(e) => { Err(e) => {
tracing::error!("failed to get user: {e}"); log::error!("failed to get user: {e}");
return Ok(None); return Ok(None);
} }
}; };
@ -61,7 +61,7 @@ impl AuthImpl for GitHubAuth {
let user_id = match response.json::<UserResponse>().await { let user_id = match response.json::<UserResponse>().await {
Ok(resp) => resp.user.id, Ok(resp) => resp.user.id,
Err(e) => { Err(e) => {
tracing::error!("failed to get user: {e}"); log::error!("failed to get user: {e}");
return Ok(None); return Ok(None);
} }
}; };

View file

@ -71,7 +71,7 @@ pub async fn get_package_version(
let (scope, name_part) = name.as_str(); let (scope, name_part) = name.as_str();
let file: IndexFile = { let entries: IndexFile = {
let source = app_state.source.lock().await; let source = app_state.source.lock().await;
let repo = gix::open(source.path(&app_state.project))?; let repo = gix::open(source.path(&app_state.project))?;
let tree = root_tree(&repo)?; let tree = root_tree(&repo)?;
@ -84,15 +84,14 @@ pub async fn get_package_version(
let Some((v_id, entry, targets)) = ({ let Some((v_id, entry, targets)) = ({
let version = match version { let version = match version {
VersionRequest::Latest => match file.entries.keys().map(|k| k.version()).max() { VersionRequest::Latest => match entries.keys().map(|k| k.version()).max() {
Some(latest) => latest.clone(), Some(latest) => latest.clone(),
None => return Ok(HttpResponse::NotFound().finish()), None => return Ok(HttpResponse::NotFound().finish()),
}, },
VersionRequest::Specific(version) => version, VersionRequest::Specific(version) => version,
}; };
let versions = file let versions = entries
.entries
.iter() .iter()
.filter(|(v_id, _)| *v_id.version() == version); .filter(|(v_id, _)| *v_id.version() == version);

View file

@ -19,7 +19,7 @@ pub async fn get_package_versions(
let (scope, name_part) = name.as_str(); let (scope, name_part) = name.as_str();
let file: IndexFile = { let versions: IndexFile = {
let source = app_state.source.lock().await; let source = app_state.source.lock().await;
let repo = gix::open(source.path(&app_state.project))?; let repo = gix::open(source.path(&app_state.project))?;
let tree = root_tree(&repo)?; let tree = root_tree(&repo)?;
@ -32,7 +32,7 @@ pub async fn get_package_versions(
let mut responses = BTreeMap::new(); let mut responses = BTreeMap::new();
for (v_id, entry) in file.entries { for (v_id, entry) in versions {
let info = responses let info = responses
.entry(v_id.version().clone()) .entry(v_id.version().clone())
.or_insert_with(|| PackageResponse { .or_insert_with(|| PackageResponse {

View file

@ -304,7 +304,7 @@ pub async fn publish_package(
.filter(|index| match gix::Url::try_from(*index) { .filter(|index| match gix::Url::try_from(*index) {
Ok(url) => config Ok(url) => config
.other_registries_allowed .other_registries_allowed
.is_allowed_or_same(source.repo_url().clone(), url), .is_allowed(source.repo_url().clone(), url),
Err(_) => false, Err(_) => false,
}) })
.is_none() .is_none()
@ -315,13 +315,16 @@ pub async fn publish_package(
} }
} }
DependencySpecifiers::Wally(specifier) => { DependencySpecifiers::Wally(specifier) => {
if !config.wally_allowed {
return Err(Error::InvalidArchive(
"wally dependencies are not allowed".into(),
));
}
if specifier if specifier
.index .index
.as_deref() .as_ref()
.filter(|index| match gix::Url::try_from(*index) { .filter(|index| index.parse::<url::Url>().is_ok())
Ok(url) => config.wally_allowed.is_allowed(url),
Err(_) => false,
})
.is_none() .is_none()
{ {
return Err(Error::InvalidArchive(format!( return Err(Error::InvalidArchive(format!(
@ -329,15 +332,15 @@ pub async fn publish_package(
))); )));
} }
} }
DependencySpecifiers::Git(specifier) => { DependencySpecifiers::Git(_) => {
if !config.git_allowed.is_allowed(specifier.repo.clone()) { if !config.git_allowed {
return Err(Error::InvalidArchive( return Err(Error::InvalidArchive(
"git dependencies are not allowed".into(), "git dependencies are not allowed".into(),
)); ));
} }
} }
DependencySpecifiers::Workspace(_) => { DependencySpecifiers::Workspace(_) => {
// workspace specifiers are to be transformed into pesde specifiers by the sender // workspace specifiers are to be transformed into Pesde specifiers by the sender
return Err(Error::InvalidArchive( return Err(Error::InvalidArchive(
"non-transformed workspace dependency".into(), "non-transformed workspace dependency".into(),
)); ));
@ -371,7 +374,7 @@ pub async fn publish_package(
} }
}; };
let mut file: IndexFile = let mut entries: IndexFile =
toml::de::from_str(&read_file(&gix_tree, [scope, name])?.unwrap_or_default())?; toml::de::from_str(&read_file(&gix_tree, [scope, name])?.unwrap_or_default())?;
let new_entry = IndexFileEntry { let new_entry = IndexFileEntry {
@ -386,12 +389,11 @@ pub async fn publish_package(
dependencies, dependencies,
}; };
let this_version = file let this_version = entries
.entries
.keys() .keys()
.find(|v_id| *v_id.version() == manifest.version); .find(|v_id| *v_id.version() == manifest.version);
if let Some(this_version) = this_version { if let Some(this_version) = this_version {
let other_entry = file.entries.get(this_version).unwrap(); let other_entry = entries.get(this_version).unwrap();
// description cannot be different - which one to render in the "Recently published" list? // description cannot be different - which one to render in the "Recently published" list?
// the others cannot be different because what to return from the versions endpoint? // the others cannot be different because what to return from the versions endpoint?
@ -407,8 +409,7 @@ pub async fn publish_package(
} }
} }
if file if entries
.entries
.insert( .insert(
VersionId::new(manifest.version.clone(), manifest.target.kind()), VersionId::new(manifest.version.clone(), manifest.target.kind()),
new_entry.clone(), new_entry.clone(),
@ -424,7 +425,7 @@ pub async fn publish_package(
let reference = repo.find_reference(&refspec)?; let reference = repo.find_reference(&refspec)?;
{ {
let index_content = toml::to_string(&file)?; let index_content = toml::to_string(&entries)?;
let mut blob_writer = repo.blob_writer(None)?; let mut blob_writer = repo.blob_writer(None)?;
blob_writer.write_all(index_content.as_bytes())?; blob_writer.write_all(index_content.as_bytes())?;
oids.push((name, blob_writer.commit()?)); oids.push((name, blob_writer.commit()?));

View file

@ -68,11 +68,10 @@ pub async fn search_packages(
.unwrap(); .unwrap();
let (scope, name) = id.as_str(); let (scope, name) = id.as_str();
let file: IndexFile = let versions: IndexFile =
toml::de::from_str(&read_file(&tree, [scope, name]).unwrap().unwrap()).unwrap(); toml::de::from_str(&read_file(&tree, [scope, name]).unwrap().unwrap()).unwrap();
let (latest_version, entry) = file let (latest_version, entry) = versions
.entries
.iter() .iter()
.max_by_key(|(v_id, _)| v_id.version()) .max_by_key(|(v_id, _)| v_id.version())
.unwrap(); .unwrap();
@ -80,19 +79,17 @@ pub async fn search_packages(
PackageResponse { PackageResponse {
name: id.to_string(), name: id.to_string(),
version: latest_version.version().to_string(), version: latest_version.version().to_string(),
targets: file targets: versions
.entries
.iter() .iter()
.filter(|(v_id, _)| v_id.version() == latest_version.version()) .filter(|(v_id, _)| v_id.version() == latest_version.version())
.map(|(_, entry)| (&entry.target).into()) .map(|(_, entry)| (&entry.target).into())
.collect(), .collect(),
description: entry.description.clone().unwrap_or_default(), description: entry.description.clone().unwrap_or_default(),
published_at: file published_at: versions
.entries
.values() .values()
.map(|entry| entry.published_at) .max_by_key(|entry| entry.published_at)
.max() .unwrap()
.unwrap(), .published_at,
license: entry.license.clone().unwrap_or_default(), license: entry.license.clone().unwrap_or_default(),
authors: entry.authors.clone(), authors: entry.authors.clone(),
repository: entry.repository.clone().map(|url| url.to_string()), repository: entry.repository.clone().map(|url| url.to_string()),

View file

@ -1,4 +1,5 @@
use actix_web::{body::BoxBody, HttpResponse, ResponseError}; use actix_web::{body::BoxBody, HttpResponse, ResponseError};
use log::error;
use pesde::source::git_index::errors::{ReadFile, RefreshError, TreeError}; use pesde::source::git_index::errors::{ReadFile, RefreshError, TreeError};
use serde::Serialize; use serde::Serialize;
use thiserror::Error; use thiserror::Error;
@ -66,7 +67,7 @@ impl ResponseError for Error {
error: format!("archive is invalid: {e}"), error: format!("archive is invalid: {e}"),
}), }),
e => { e => {
tracing::error!("unhandled error: {e:?}"); log::error!("unhandled error: {e:?}");
HttpResponse::InternalServerError().finish() HttpResponse::InternalServerError().finish()
} }
} }

View file

@ -6,22 +6,19 @@ use crate::{
use actix_cors::Cors; use actix_cors::Cors;
use actix_governor::{Governor, GovernorConfigBuilder}; use actix_governor::{Governor, GovernorConfigBuilder};
use actix_web::{ use actix_web::{
middleware::{from_fn, Compress, NormalizePath, TrailingSlash}, middleware::{from_fn, Compress, Logger, NormalizePath, TrailingSlash},
rt::System, rt::System,
web, web,
web::PayloadConfig, web::PayloadConfig,
App, HttpServer, App, HttpServer,
}; };
use fs_err::tokio as fs; use fs_err::tokio as fs;
use log::info;
use pesde::{ use pesde::{
source::{pesde::PesdePackageSource, traits::PackageSource}, source::{pesde::PesdePackageSource, traits::PackageSource},
AuthConfig, Project, AuthConfig, Project,
}; };
use std::{env::current_dir, path::PathBuf}; use std::{env::current_dir, path::PathBuf};
use tracing::level_filters::LevelFilter;
use tracing_subscriber::{
fmt::format::FmtSpan, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter,
};
mod auth; mod auth;
mod endpoints; mod endpoints;
@ -119,12 +116,12 @@ async fn run() -> std::io::Result<()> {
let app_data = web::Data::new(AppState { let app_data = web::Data::new(AppState {
storage: { storage: {
let storage = get_storage_from_env(); let storage = get_storage_from_env();
tracing::info!("storage: {storage}"); info!("storage: {storage}");
storage storage
}, },
auth: { auth: {
let auth = get_auth_from_env(&config); let auth = get_auth_from_env(&config);
tracing::info!("auth: {auth}"); info!("auth: {auth}");
auth auth
}, },
source: tokio::sync::Mutex::new(source), source: tokio::sync::Mutex::new(source),
@ -143,12 +140,14 @@ async fn run() -> std::io::Result<()> {
.finish() .finish()
.unwrap(); .unwrap();
info!("listening on {address}:{port}");
HttpServer::new(move || { HttpServer::new(move || {
App::new() App::new()
.wrap(sentry_actix::Sentry::with_transaction()) .wrap(sentry_actix::Sentry::with_transaction())
.wrap(NormalizePath::new(TrailingSlash::Trim)) .wrap(NormalizePath::new(TrailingSlash::Trim))
.wrap(Cors::permissive()) .wrap(Cors::permissive())
.wrap(tracing_actix_web::TracingLogger::default()) .wrap(Logger::default())
.wrap(Compress::default()) .wrap(Compress::default())
.app_data(app_data.clone()) .app_data(app_data.clone())
.route( .route(
@ -201,26 +200,12 @@ async fn run() -> std::io::Result<()> {
fn main() -> std::io::Result<()> { fn main() -> std::io::Result<()> {
let _ = dotenvy::dotenv(); let _ = dotenvy::dotenv();
let tracing_env_filter = EnvFilter::builder() let mut log_builder = pretty_env_logger::formatted_builder();
.with_default_directive(LevelFilter::INFO.into()) log_builder.parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"));
.from_env_lossy()
.add_directive("reqwest=info".parse().unwrap())
.add_directive("rustls=info".parse().unwrap())
.add_directive("tokio_util=info".parse().unwrap())
.add_directive("goblin=info".parse().unwrap())
.add_directive("tower=info".parse().unwrap())
.add_directive("hyper=info".parse().unwrap())
.add_directive("h2=info".parse().unwrap());
tracing_subscriber::registry() let logger = sentry::integrations::log::SentryLogger::with_dest(log_builder.build());
.with(tracing_env_filter) log::set_boxed_logger(Box::new(logger)).unwrap();
.with( log::set_max_level(log::LevelFilter::Info);
tracing_subscriber::fmt::layer()
.compact()
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE),
)
.with(sentry::integrations::tracing::layer())
.init();
let guard = sentry::init(sentry::ClientOptions { let guard = sentry::init(sentry::ClientOptions {
release: sentry::release_name!(), release: sentry::release_name!(),
@ -233,9 +218,9 @@ fn main() -> std::io::Result<()> {
if guard.is_enabled() { if guard.is_enabled() {
std::env::set_var("RUST_BACKTRACE", "full"); std::env::set_var("RUST_BACKTRACE", "full");
tracing::info!("sentry initialized"); info!("sentry initialized");
} else { } else {
tracing::info!("sentry **NOT** initialized"); info!("sentry **NOT** initialized");
} }
System::new().block_on(run()) System::new().block_on(run())

View file

@ -8,8 +8,6 @@ pub struct TargetInfo {
kind: TargetKind, kind: TargetKind,
lib: bool, lib: bool,
bin: bool, bin: bool,
#[serde(skip_serializing_if = "BTreeSet::is_empty")]
scripts: BTreeSet<String>,
} }
impl From<Target> for TargetInfo { impl From<Target> for TargetInfo {
@ -24,10 +22,6 @@ impl From<&Target> for TargetInfo {
kind: target.kind(), kind: target.kind(),
lib: target.lib_path().is_some(), lib: target.lib_path().is_some(),
bin: target.bin_path().is_some(), bin: target.bin_path().is_some(),
scripts: target
.scripts()
.map(|scripts| scripts.keys().cloned().collect())
.unwrap_or_default(),
} }
} }
} }

View file

@ -104,8 +104,8 @@ pub async fn make_search(
pin!(stream); pin!(stream);
while let Some((pkg_name, mut file)) = stream.next().await { while let Some((pkg_name, mut file)) = stream.next().await {
let Some((_, latest_entry)) = file.entries.pop_last() else { let Some((_, latest_entry)) = file.pop_last() else {
tracing::error!("no versions found for {pkg_name}"); log::warn!("no versions found for {pkg_name}");
continue; continue;
}; };

View file

@ -5,7 +5,6 @@ use keyring::Entry;
use reqwest::header::AUTHORIZATION; use reqwest::header::AUTHORIZATION;
use serde::{ser::SerializeMap, Deserialize, Serialize}; use serde::{ser::SerializeMap, Deserialize, Serialize};
use std::collections::BTreeMap; use std::collections::BTreeMap;
use tracing::instrument;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Tokens(pub BTreeMap<gix::Url, String>); pub struct Tokens(pub BTreeMap<gix::Url, String>);
@ -38,20 +37,15 @@ impl<'de> Deserialize<'de> for Tokens {
} }
} }
#[instrument(level = "trace")]
pub async fn get_tokens() -> anyhow::Result<Tokens> { pub async fn get_tokens() -> anyhow::Result<Tokens> {
let config = read_config().await?; let config = read_config().await?;
if !config.tokens.0.is_empty() { if !config.tokens.0.is_empty() {
tracing::debug!("using tokens from config");
return Ok(config.tokens); return Ok(config.tokens);
} }
match Entry::new("tokens", env!("CARGO_PKG_NAME")) { match Entry::new("tokens", env!("CARGO_PKG_NAME")) {
Ok(entry) => match entry.get_password() { Ok(entry) => match entry.get_password() {
Ok(token) => { Ok(token) => return serde_json::from_str(&token).context("failed to parse tokens"),
tracing::debug!("using tokens from keyring");
return serde_json::from_str(&token).context("failed to parse tokens");
}
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {} Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
Err(e) => return Err(e.into()), Err(e) => return Err(e.into()),
}, },
@ -62,22 +56,16 @@ pub async fn get_tokens() -> anyhow::Result<Tokens> {
Ok(Tokens(BTreeMap::new())) Ok(Tokens(BTreeMap::new()))
} }
#[instrument(level = "trace")]
pub async fn set_tokens(tokens: Tokens) -> anyhow::Result<()> { pub async fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?; let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?; let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?;
match entry.set_password(&json) { match entry.set_password(&json) {
Ok(()) => { Ok(()) => return Ok(()),
tracing::debug!("tokens saved to keyring");
return Ok(());
}
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {} Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
Err(e) => return Err(e.into()), Err(e) => return Err(e.into()),
} }
tracing::debug!("tokens saved to config");
let mut config = read_config().await?; let mut config = read_config().await?;
config.tokens = tokens; config.tokens = tokens;
write_config(&config).await.map_err(Into::into) write_config(&config).await.map_err(Into::into)
@ -98,7 +86,6 @@ struct UserResponse {
login: String, login: String,
} }
#[instrument(level = "trace")]
pub async fn get_token_login( pub async fn get_token_login(
reqwest: &reqwest::Client, reqwest: &reqwest::Client,
access_token: &str, access_token: &str,

View file

@ -2,7 +2,6 @@ use std::{collections::HashSet, str::FromStr};
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use colored::Colorize;
use semver::VersionReq; use semver::VersionReq;
use crate::cli::{config::read_config, AnyPackageIdentifier, VersionedPackageName}; use crate::cli::{config::read_config, AnyPackageIdentifier, VersionedPackageName};
@ -63,7 +62,7 @@ impl AddCommand {
.cloned(); .cloned();
if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) { if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
println!("{}: index {index} not found", "error".red().bold()); log::error!("index {index} not found");
return Ok(()); return Ok(());
} }
@ -90,7 +89,7 @@ impl AddCommand {
.cloned(); .cloned();
if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) { if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
println!("{}: wally index {index} not found", "error".red().bold()); log::error!("wally index {index} not found");
return Ok(()); return Ok(());
} }
@ -146,7 +145,7 @@ impl AddCommand {
.pop_last() .pop_last()
.map(|(v_id, _)| v_id) .map(|(v_id, _)| v_id)
else { else {
println!("{}: no versions found for package", "error".red().bold()); log::error!("no versions found for package {specifier}");
return Ok(()); return Ok(());
}; };

View file

@ -2,6 +2,7 @@ use crate::cli::{config::read_config, progress_bar, VersionedPackageName};
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use fs_err::tokio as fs; use fs_err::tokio as fs;
use indicatif::MultiProgress;
use pesde::{ use pesde::{
linking::generator::generate_bin_linking_module, linking::generator::generate_bin_linking_module,
manifest::target::TargetKind, manifest::target::TargetKind,
@ -34,7 +35,12 @@ pub struct ExecuteCommand {
} }
impl ExecuteCommand { impl ExecuteCommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> { pub async fn run(
self,
project: Project,
multi: MultiProgress,
reqwest: reqwest::Client,
) -> anyhow::Result<()> {
let index = match self.index { let index = match self.index {
Some(index) => Some(index), Some(index) => Some(index),
None => read_config().await.ok().map(|c| c.default_index), None => read_config().await.ok().map(|c| c.default_index),
@ -78,7 +84,7 @@ impl ExecuteCommand {
); );
}; };
println!("using {}@{version}", pkg_ref.name); log::info!("found package {}@{version}", pkg_ref.name);
let tmp_dir = project.cas_dir().join(".tmp"); let tmp_dir = project.cas_dir().join(".tmp");
fs::create_dir_all(&tmp_dir) fs::create_dir_all(&tmp_dir)
@ -128,6 +134,7 @@ impl ExecuteCommand {
progress_bar( progress_bar(
graph.values().map(|versions| versions.len() as u64).sum(), graph.values().map(|versions| versions.len() as u64).sum(),
rx, rx,
&multi,
"📥 ".to_string(), "📥 ".to_string(),
"downloading dependencies".to_string(), "downloading dependencies".to_string(),
"downloaded dependencies".to_string(), "downloaded dependencies".to_string(),

View file

@ -16,26 +16,11 @@ use pesde::{
Project, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER, Project, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER,
}; };
use semver::VersionReq; use semver::VersionReq;
use std::{collections::HashSet, fmt::Display, str::FromStr}; use std::{collections::HashSet, str::FromStr};
#[derive(Debug, Args)] #[derive(Debug, Args)]
pub struct InitCommand {} pub struct InitCommand {}
#[derive(Debug)]
enum PackageNameOrCustom {
PackageName(PackageName),
Custom,
}
impl Display for PackageNameOrCustom {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
PackageNameOrCustom::PackageName(n) => write!(f, "{n}"),
PackageNameOrCustom::Custom => write!(f, "custom"),
}
}
}
impl InitCommand { impl InitCommand {
pub async fn run(self, project: Project) -> anyhow::Result<()> { pub async fn run(self, project: Project) -> anyhow::Result<()> {
match project.read_manifest().await { match project.read_manifest().await {
@ -142,49 +127,7 @@ impl InitCommand {
.await .await
.context("failed to get source config")?; .context("failed to get source config")?;
let scripts_package = if config.scripts_packages.is_empty() { if let Some(scripts_pkg_name) = config.scripts_package {
PackageNameOrCustom::Custom
} else {
inquire::Select::new(
"which scripts package do you want to use?",
config
.scripts_packages
.into_iter()
.map(PackageNameOrCustom::PackageName)
.chain(std::iter::once(PackageNameOrCustom::Custom))
.collect(),
)
.prompt()
.unwrap()
};
let scripts_package = match scripts_package {
PackageNameOrCustom::PackageName(p) => Some(p),
PackageNameOrCustom::Custom => {
let name = inquire::Text::new("which scripts package to use?")
.with_validator(|name: &str| {
if name.is_empty() {
return Ok(Validation::Valid);
}
Ok(match PackageName::from_str(name) {
Ok(_) => Validation::Valid,
Err(e) => Validation::Invalid(e.to_string().into()),
})
})
.with_help_message("leave empty for none")
.prompt()
.unwrap();
if name.is_empty() {
None
} else {
Some(PackageName::from_str(&name).unwrap())
}
}
};
if let Some(scripts_pkg_name) = scripts_package {
let (v_id, pkg_ref) = source let (v_id, pkg_ref) = source
.resolve( .resolve(
&PesdeDependencySpecifier { &PesdeDependencySpecifier {
@ -242,7 +185,7 @@ impl InitCommand {
} else { } else {
println!( println!(
"{}", "{}",
"no scripts package configured, this can cause issues with Roblox compatibility".red() "configured index hasn't a configured scripts package".red()
); );
if !inquire::prompt_confirmation("initialize regardless?").unwrap() { if !inquire::prompt_confirmation("initialize regardless?").unwrap() {
return Ok(()); return Ok(());

View file

@ -6,6 +6,7 @@ use clap::Args;
use colored::{ColoredString, Colorize}; use colored::{ColoredString, Colorize};
use fs_err::tokio as fs; use fs_err::tokio as fs;
use futures::future::try_join_all; use futures::future::try_join_all;
use indicatif::MultiProgress;
use pesde::{ use pesde::{
download_and_link::filter_graph, lockfile::Lockfile, manifest::target::TargetKind, Project, download_and_link::filter_graph, lockfile::Lockfile, manifest::target::TargetKind, Project,
MANIFEST_FILE_NAME, MANIFEST_FILE_NAME,
@ -88,7 +89,12 @@ fn job(n: u8) -> ColoredString {
struct CallbackError(#[from] anyhow::Error); struct CallbackError(#[from] anyhow::Error);
impl InstallCommand { impl InstallCommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> { pub async fn run(
self,
project: Project,
multi: MultiProgress,
reqwest: reqwest::Client,
) -> anyhow::Result<()> {
let mut refreshed_sources = HashSet::new(); let mut refreshed_sources = HashSet::new();
let manifest = project let manifest = project
@ -110,10 +116,10 @@ impl InstallCommand {
match project.deser_lockfile().await { match project.deser_lockfile().await {
Ok(lockfile) => { Ok(lockfile) => {
if lockfile.overrides != manifest.overrides { if lockfile.overrides != manifest.overrides {
tracing::debug!("overrides are different"); log::debug!("overrides are different");
None None
} else if lockfile.target != manifest.target.kind() { } else if lockfile.target != manifest.target.kind() {
tracing::debug!("target kind is different"); log::debug!("target kind is different");
None None
} else { } else {
Some(lockfile) Some(lockfile)
@ -147,7 +153,7 @@ impl InstallCommand {
deleted_folders deleted_folders
.entry(folder.to_string()) .entry(folder.to_string())
.or_insert_with(|| async move { .or_insert_with(|| async move {
tracing::debug!("deleting the {folder} folder"); log::debug!("deleting the {folder} folder");
if let Some(e) = fs::remove_dir_all(package_dir.join(&folder)) if let Some(e) = fs::remove_dir_all(package_dir.join(&folder))
.await .await
@ -213,7 +219,7 @@ impl InstallCommand {
.map(|(alias, _, _)| alias) .map(|(alias, _, _)| alias)
.filter(|alias| { .filter(|alias| {
if *alias == env!("CARGO_BIN_NAME") { if *alias == env!("CARGO_BIN_NAME") {
tracing::warn!( log::warn!(
"package {alias} has the same name as the CLI, skipping bin link" "package {alias} has the same name as the CLI, skipping bin link"
); );
return false; return false;
@ -251,7 +257,7 @@ impl InstallCommand {
fs::write( fs::write(
&bin_exec_file, &bin_exec_file,
format!(r#"#!/bin/sh format!(r#"#!/bin/sh
exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""# lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
), ),
) )
.await .await
@ -275,6 +281,7 @@ exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
progress_bar( progress_bar(
graph.values().map(|versions| versions.len() as u64).sum(), graph.values().map(|versions| versions.len() as u64).sum(),
rx, rx,
&multi,
format!("{} 📥 ", job(3)), format!("{} 📥 ", job(3)),
"downloading dependencies".to_string(), "downloading dependencies".to_string(),
"downloaded dependencies".to_string(), "downloaded dependencies".to_string(),
@ -296,6 +303,7 @@ exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
progress_bar( progress_bar(
manifest.patches.values().map(|v| v.len() as u64).sum(), manifest.patches.values().map(|v| v.len() as u64).sum(),
rx, rx,
&multi,
format!("{} 🩹 ", job(JOBS - 1)), format!("{} 🩹 ", job(JOBS - 1)),
"applying patches".to_string(), "applying patches".to_string(),
"applied patches".to_string(), "applied patches".to_string(),
@ -315,8 +323,9 @@ exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
graph: downloaded_graph, graph: downloaded_graph,
workspace: run_on_workspace_members(&project, |project| { workspace: run_on_workspace_members(&project, |project| {
let multi = multi.clone();
let reqwest = reqwest.clone(); let reqwest = reqwest.clone();
async move { Box::pin(self.run(project, reqwest)).await } async move { Box::pin(self.run(project, multi, reqwest)).await }
}) })
.await?, .await?,
}) })

View file

@ -1,3 +1,4 @@
use indicatif::MultiProgress;
use pesde::Project; use pesde::Project;
mod add; mod add;
@ -71,13 +72,18 @@ pub enum Subcommand {
} }
impl Subcommand { impl Subcommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> { pub async fn run(
self,
project: Project,
multi: MultiProgress,
reqwest: reqwest::Client,
) -> anyhow::Result<()> {
match self { match self {
Subcommand::Auth(auth) => auth.run(project, reqwest).await, Subcommand::Auth(auth) => auth.run(project, reqwest).await,
Subcommand::Config(config) => config.run().await, Subcommand::Config(config) => config.run().await,
Subcommand::Init(init) => init.run(project).await, Subcommand::Init(init) => init.run(project).await,
Subcommand::Run(run) => run.run(project).await, Subcommand::Run(run) => run.run(project).await,
Subcommand::Install(install) => install.run(project, reqwest).await, Subcommand::Install(install) => install.run(project, multi, reqwest).await,
Subcommand::Publish(publish) => publish.run(project, reqwest).await, Subcommand::Publish(publish) => publish.run(project, reqwest).await,
#[cfg(feature = "version-management")] #[cfg(feature = "version-management")]
Subcommand::SelfInstall(self_install) => self_install.run().await, Subcommand::SelfInstall(self_install) => self_install.run().await,
@ -88,9 +94,9 @@ impl Subcommand {
#[cfg(feature = "version-management")] #[cfg(feature = "version-management")]
Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await, Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await,
Subcommand::Add(add) => add.run(project).await, Subcommand::Add(add) => add.run(project).await,
Subcommand::Update(update) => update.run(project, reqwest).await, Subcommand::Update(update) => update.run(project, multi, reqwest).await,
Subcommand::Outdated(outdated) => outdated.run(project).await, Subcommand::Outdated(outdated) => outdated.run(project).await,
Subcommand::Execute(execute) => execute.run(project, reqwest).await, Subcommand::Execute(execute) => execute.run(project, multi, reqwest).await,
} }
} }
} }

View file

@ -4,13 +4,11 @@ use async_compression::Level;
use clap::Args; use clap::Args;
use colored::Colorize; use colored::Colorize;
use fs_err::tokio as fs; use fs_err::tokio as fs;
#[allow(deprecated)]
use pesde::{ use pesde::{
manifest::{target::Target, DependencyType}, manifest::{target::Target, DependencyType},
matching_globs_old_behaviour, matching_globs_old_behaviour,
scripts::ScriptName, scripts::ScriptName,
source::{ source::{
git_index::GitBasedSource,
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource}, pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
specifiers::DependencySpecifiers, specifiers::DependencySpecifiers,
traits::PackageSource, traits::PackageSource,
@ -130,7 +128,6 @@ impl PublishCommand {
_ => None, _ => None,
}; };
#[allow(deprecated)]
let mut paths = matching_globs_old_behaviour( let mut paths = matching_globs_old_behaviour(
project.package_dir(), project.package_dir(),
manifest.includes.iter().map(|s| s.as_str()), manifest.includes.iter().map(|s| s.as_str()),
@ -365,6 +362,10 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
} }
} }
#[cfg(feature = "wally-compat")]
let mut has_wally = false;
let mut has_git = false;
for specifier in manifest for specifier in manifest
.dependencies .dependencies
.values_mut() .values_mut()
@ -388,6 +389,8 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
} }
#[cfg(feature = "wally-compat")] #[cfg(feature = "wally-compat")]
DependencySpecifiers::Wally(specifier) => { DependencySpecifiers::Wally(specifier) => {
has_wally = true;
let index_name = specifier let index_name = specifier
.index .index
.as_deref() .as_deref()
@ -403,7 +406,9 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
.to_string(), .to_string(),
); );
} }
DependencySpecifiers::Git(_) => {} DependencySpecifiers::Git(_) => {
has_git = true;
}
DependencySpecifiers::Workspace(spec) => { DependencySpecifiers::Workspace(spec) => {
let pkg_ref = WorkspacePackageSource let pkg_ref = WorkspacePackageSource
.resolve(spec, project, target_kind, &mut HashSet::new()) .resolve(spec, project, target_kind, &mut HashSet::new())
@ -501,16 +506,6 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
.bin_path() .bin_path()
.map_or("(none)".to_string(), |p| p.to_string()) .map_or("(none)".to_string(), |p| p.to_string())
); );
println!(
"\tscripts: {}",
manifest
.target
.scripts()
.filter(|s| !s.is_empty())
.map_or("(none)".to_string(), |s| {
s.keys().cloned().collect::<Vec<_>>().join(", ")
})
);
} }
println!( println!(
@ -575,7 +570,8 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
.get(&self.index) .get(&self.index)
.context(format!("missing index {}", self.index))?; .context(format!("missing index {}", self.index))?;
let source = PesdePackageSource::new(index_url.clone()); let source = PesdePackageSource::new(index_url.clone());
PackageSource::refresh(&source, project) source
.refresh(project)
.await .await
.context("failed to refresh source")?; .context("failed to refresh source")?;
let config = source let config = source
@ -591,23 +587,15 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
); );
} }
let deps = manifest.all_dependencies().context("dependency conflict")?; manifest.all_dependencies().context("dependency conflict")?;
if let Some((disallowed, _)) = deps.iter().find(|(_, (spec, _))| match spec { if !config.git_allowed && has_git {
DependencySpecifiers::Pesde(spec) => { anyhow::bail!("git dependencies are not allowed on this index");
!config.other_registries_allowed.is_allowed_or_same(
source.repo_url().clone(),
gix::Url::try_from(spec.index.as_deref().unwrap()).unwrap(),
)
} }
DependencySpecifiers::Git(spec) => !config.git_allowed.is_allowed(spec.repo.clone()),
#[cfg(feature = "wally-compat")] #[cfg(feature = "wally-compat")]
DependencySpecifiers::Wally(spec) => !config if !config.wally_allowed && has_wally {
.wally_allowed anyhow::bail!("wally dependencies are not allowed on this index");
.is_allowed(gix::Url::try_from(spec.index.as_deref().unwrap()).unwrap()),
_ => false,
}) {
anyhow::bail!("dependency `{disallowed}` is not allowed on this index");
} }
if self.dry_run { if self.dry_run {
@ -626,7 +614,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
.body(archive); .body(archive);
if let Some(token) = project.auth_config().tokens().get(index_url) { if let Some(token) = project.auth_config().tokens().get(index_url) {
tracing::debug!("using token for {index_url}"); log::debug!("using token for {index_url}");
request = request.header(AUTHORIZATION, token); request = request.header(AUTHORIZATION, token);
} }

View file

@ -1,8 +1,7 @@
use crate::cli::{ use crate::cli::{
config::read_config, config::read_config,
version::{ version::{
current_version, get_or_download_version, get_remote_version, no_build_metadata, current_version, get_latest_remote_version, get_or_download_version, update_bin_exe,
update_bin_exe, TagInfo, VersionType,
}, },
}; };
use anyhow::Context; use anyhow::Context;
@ -25,33 +24,33 @@ impl SelfUpgradeCommand {
.context("no cached version found")? .context("no cached version found")?
.1 .1
} else { } else {
get_remote_version(&reqwest, VersionType::Latest).await? get_latest_remote_version(&reqwest).await?
}; };
let latest_version_no_metadata = no_build_metadata(&latest_version); if latest_version <= current_version() {
if latest_version_no_metadata <= current_version() {
println!("already up to date"); println!("already up to date");
return Ok(()); return Ok(());
} }
let display_latest_version = latest_version_no_metadata.to_string().yellow().bold();
if !inquire::prompt_confirmation(format!( if !inquire::prompt_confirmation(format!(
"are you sure you want to upgrade {} from {} to {display_latest_version}?", "are you sure you want to upgrade {} from {} to {}?",
env!("CARGO_BIN_NAME").cyan(), env!("CARGO_BIN_NAME").cyan(),
env!("CARGO_PKG_VERSION").yellow().bold() current_version().to_string().yellow().bold(),
latest_version.to_string().yellow().bold()
))? { ))? {
println!("cancelled upgrade"); println!("cancelled upgrade");
return Ok(()); return Ok(());
} }
let path = get_or_download_version(&reqwest, &TagInfo::Complete(latest_version), true) let path = get_or_download_version(&reqwest, &latest_version, true)
.await? .await?
.unwrap(); .unwrap();
update_bin_exe(&path).await?; update_bin_exe(&path).await?;
println!("upgraded to version {display_latest_version}!"); println!(
"upgraded to version {}!",
latest_version.to_string().yellow().bold()
);
Ok(()) Ok(())
} }

View file

@ -2,6 +2,7 @@ use crate::cli::{progress_bar, run_on_workspace_members};
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use colored::Colorize; use colored::Colorize;
use indicatif::MultiProgress;
use pesde::{lockfile::Lockfile, Project}; use pesde::{lockfile::Lockfile, Project};
use std::{collections::HashSet, sync::Arc}; use std::{collections::HashSet, sync::Arc};
use tokio::sync::Mutex; use tokio::sync::Mutex;
@ -10,7 +11,12 @@ use tokio::sync::Mutex;
pub struct UpdateCommand {} pub struct UpdateCommand {}
impl UpdateCommand { impl UpdateCommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> { pub async fn run(
self,
project: Project,
multi: MultiProgress,
reqwest: reqwest::Client,
) -> anyhow::Result<()> {
let mut refreshed_sources = HashSet::new(); let mut refreshed_sources = HashSet::new();
let manifest = project let manifest = project
@ -54,6 +60,7 @@ impl UpdateCommand {
progress_bar( progress_bar(
graph.values().map(|versions| versions.len() as u64).sum(), graph.values().map(|versions| versions.len() as u64).sum(),
rx, rx,
&multi,
"📥 ".to_string(), "📥 ".to_string(),
"downloading dependencies".to_string(), "downloading dependencies".to_string(),
"downloaded dependencies".to_string(), "downloaded dependencies".to_string(),
@ -66,8 +73,9 @@ impl UpdateCommand {
}, },
workspace: run_on_workspace_members(&project, |project| { workspace: run_on_workspace_members(&project, |project| {
let multi = multi.clone();
let reqwest = reqwest.clone(); let reqwest = reqwest.clone();
async move { Box::pin(self.run(project, reqwest)).await } async move { Box::pin(self.run(project, multi, reqwest)).await }
}) })
.await?, .await?,
}) })

View file

@ -2,7 +2,6 @@ use crate::cli::{auth::Tokens, home_dir};
use anyhow::Context; use anyhow::Context;
use fs_err::tokio as fs; use fs_err::tokio as fs;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::instrument;
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)] #[serde(default)]
@ -31,7 +30,6 @@ impl Default for CliConfig {
} }
} }
#[instrument(level = "trace")]
pub async fn read_config() -> anyhow::Result<CliConfig> { pub async fn read_config() -> anyhow::Result<CliConfig> {
let config_string = match fs::read_to_string(home_dir()?.join("config.toml")).await { let config_string = match fs::read_to_string(home_dir()?.join("config.toml")).await {
Ok(config_string) => config_string, Ok(config_string) => config_string,
@ -46,7 +44,6 @@ pub async fn read_config() -> anyhow::Result<CliConfig> {
Ok(config) Ok(config)
} }
#[instrument(level = "trace")]
pub async fn write_config(config: &CliConfig) -> anyhow::Result<()> { pub async fn write_config(config: &CliConfig) -> anyhow::Result<()> {
let config_string = toml::to_string(config).context("failed to serialize config")?; let config_string = toml::to_string(config).context("failed to serialize config")?;
fs::write(home_dir()?.join("config.toml"), config_string) fs::write(home_dir()?.join("config.toml"), config_string)

View file

@ -2,6 +2,7 @@ use anyhow::Context;
use colored::Colorize; use colored::Colorize;
use fs_err::tokio as fs; use fs_err::tokio as fs;
use futures::StreamExt; use futures::StreamExt;
use indicatif::MultiProgress;
use pesde::{ use pesde::{
lockfile::Lockfile, lockfile::Lockfile,
manifest::target::TargetKind, manifest::target::TargetKind,
@ -18,7 +19,6 @@ use std::{
time::Duration, time::Duration,
}; };
use tokio::pin; use tokio::pin;
use tracing::instrument;
pub mod auth; pub mod auth;
pub mod commands; pub mod commands;
@ -43,7 +43,6 @@ pub async fn bin_dir() -> anyhow::Result<PathBuf> {
Ok(bin_dir) Ok(bin_dir)
} }
#[instrument(skip(project), ret(level = "trace"), level = "debug")]
pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> { pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
let manifest = project.deser_manifest().await?; let manifest = project.deser_manifest().await?;
let lockfile = match project.deser_lockfile().await { let lockfile = match project.deser_lockfile().await {
@ -57,17 +56,17 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
}; };
if manifest.overrides != lockfile.overrides { if manifest.overrides != lockfile.overrides {
tracing::debug!("overrides are different"); log::debug!("overrides are different");
return Ok(None); return Ok(None);
} }
if manifest.target.kind() != lockfile.target { if manifest.target.kind() != lockfile.target {
tracing::debug!("target kind is different"); log::debug!("target kind is different");
return Ok(None); return Ok(None);
} }
if manifest.name != lockfile.name || manifest.version != lockfile.version { if manifest.name != lockfile.name || manifest.version != lockfile.version {
tracing::debug!("name or version is different"); log::debug!("name or version is different");
return Ok(None); return Ok(None);
} }
@ -89,7 +88,7 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
.iter() .iter()
.all(|(_, (spec, ty))| specs.contains(&(spec, ty))); .all(|(_, (spec, ty))| specs.contains(&(spec, ty)));
tracing::debug!("dependencies are the same: {same_dependencies}"); log::debug!("dependencies are the same: {same_dependencies}");
Ok(if same_dependencies { Ok(if same_dependencies {
Some(lockfile) Some(lockfile)
@ -134,7 +133,7 @@ impl VersionedPackageName {
let versions = graph.get(&self.0).context("package not found in graph")?; let versions = graph.get(&self.0).context("package not found in graph")?;
if versions.len() == 1 { if versions.len() == 1 {
let version = versions.keys().next().unwrap().clone(); let version = versions.keys().next().unwrap().clone();
tracing::debug!("only one version found, using {version}"); log::debug!("only one version found, using {version}");
version version
} else { } else {
anyhow::bail!( anyhow::bail!(
@ -196,18 +195,21 @@ pub fn parse_gix_url(s: &str) -> Result<gix::Url, gix::url::parse::Error> {
pub async fn progress_bar<E: std::error::Error + Into<anyhow::Error>>( pub async fn progress_bar<E: std::error::Error + Into<anyhow::Error>>(
len: u64, len: u64,
mut rx: tokio::sync::mpsc::Receiver<Result<String, E>>, mut rx: tokio::sync::mpsc::Receiver<Result<String, E>>,
multi: &MultiProgress,
prefix: String, prefix: String,
progress_msg: String, progress_msg: String,
finish_msg: String, finish_msg: String,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let bar = indicatif::ProgressBar::new(len) let bar = multi.add(
indicatif::ProgressBar::new(len)
.with_style( .with_style(
indicatif::ProgressStyle::default_bar() indicatif::ProgressStyle::default_bar()
.template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")? .template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
.progress_chars("█▓▒░ "), .progress_chars("█▓▒░ "),
) )
.with_prefix(prefix) .with_prefix(prefix)
.with_message(progress_msg); .with_message(progress_msg),
);
bar.enable_steady_tick(Duration::from_millis(100)); bar.enable_steady_tick(Duration::from_millis(100));
while let Some(result) = rx.recv().await { while let Some(result) = rx.recv().await {

View file

@ -15,8 +15,7 @@ use std::{
env::current_exe, env::current_exe,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use tokio::io::AsyncWrite; use tokio::io::AsyncReadExt;
use tracing::instrument;
pub fn current_version() -> Version { pub fn current_version() -> Version {
Version::parse(env!("CARGO_PKG_VERSION")).unwrap() Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
@ -34,33 +33,18 @@ struct Asset {
url: url::Url, url: url::Url,
} }
#[instrument(level = "trace")]
fn get_repo() -> (String, String) { fn get_repo() -> (String, String) {
let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3); let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
let (owner, repo) = ( (
parts.next().unwrap().to_string(), parts.next().unwrap().to_string(),
parts.next().unwrap().to_string(), parts.next().unwrap().to_string(),
); )
tracing::trace!("repository for updates: {owner}/{repo}");
(owner, repo)
} }
#[derive(Debug)] pub async fn get_latest_remote_version(reqwest: &reqwest::Client) -> anyhow::Result<Version> {
pub enum VersionType {
Latest,
Specific(Version),
}
#[instrument(skip(reqwest), level = "trace")]
pub async fn get_remote_version(
reqwest: &reqwest::Client,
ty: VersionType,
) -> anyhow::Result<Version> {
let (owner, repo) = get_repo(); let (owner, repo) = get_repo();
let mut releases = reqwest let releases = reqwest
.get(format!( .get(format!(
"https://api.github.com/repos/{owner}/{repo}/releases", "https://api.github.com/repos/{owner}/{repo}/releases",
)) ))
@ -71,28 +55,17 @@ pub async fn get_remote_version(
.context("failed to get GitHub API response")? .context("failed to get GitHub API response")?
.json::<Vec<Release>>() .json::<Vec<Release>>()
.await .await
.context("failed to parse GitHub API response")? .context("failed to parse GitHub API response")?;
releases
.into_iter() .into_iter()
.filter_map(|release| Version::parse(release.tag_name.trim_start_matches('v')).ok()); .map(|release| Version::parse(release.tag_name.trim_start_matches('v')).unwrap())
.max()
match ty {
VersionType::Latest => releases.max(),
VersionType::Specific(version) => {
releases.find(|v| no_build_metadata(v) == no_build_metadata(&version))
}
}
.context("failed to find latest version") .context("failed to find latest version")
} }
pub fn no_build_metadata(version: &Version) -> Version {
let mut version = version.clone();
version.build = semver::BuildMetadata::EMPTY;
version
}
const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6); const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
#[instrument(skip(reqwest), level = "trace")]
pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> { pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
let config = read_config().await?; let config = read_config().await?;
@ -100,11 +73,9 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
.last_checked_updates .last_checked_updates
.filter(|(time, _)| chrono::Utc::now() - *time < CHECK_INTERVAL) .filter(|(time, _)| chrono::Utc::now() - *time < CHECK_INTERVAL)
{ {
tracing::debug!("using cached version");
version version
} else { } else {
tracing::debug!("checking for updates"); let version = get_latest_remote_version(reqwest).await?;
let version = get_remote_version(reqwest, VersionType::Latest).await?;
write_config(&CliConfig { write_config(&CliConfig {
last_checked_updates: Some((chrono::Utc::now(), version.clone())), last_checked_updates: Some((chrono::Utc::now(), version.clone())),
@ -115,18 +86,14 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
version version
}; };
let current_version = current_version(); let current_version = current_version();
let version_no_metadata = no_build_metadata(&version);
if version_no_metadata <= current_version {
return Ok(());
}
if version > current_version {
let name = env!("CARGO_BIN_NAME"); let name = env!("CARGO_BIN_NAME");
let changelog = format!("{}/releases/tag/v{version}", env!("CARGO_PKG_REPOSITORY")); let changelog = format!("{}/releases/tag/v{version}", env!("CARGO_PKG_REPOSITORY"),);
let unformatted_messages = [ let unformatted_messages = [
"".to_string(), "".to_string(),
format!("update available! {current_version}{version_no_metadata}"), format!("update available! {current_version}{version}"),
format!("changelog: {changelog}"), format!("changelog: {changelog}"),
format!("run `{name} self-upgrade` to upgrade"), format!("run `{name} self-upgrade` to upgrade"),
"".to_string(), "".to_string(),
@ -146,7 +113,7 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
format!( format!(
"update available! {} → {}", "update available! {} → {}",
current_version.to_string().red(), current_version.to_string().red(),
version_no_metadata.to_string().green() version.to_string().green()
), ),
format!("changelog: {}", changelog.blue()), format!("changelog: {}", changelog.blue()),
format!( format!(
@ -176,16 +143,15 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
let br = "".bright_magenta(); let br = "".bright_magenta();
println!("\n{tl}{lines}{tr}\n{message}\n{bl}{lines}{br}\n"); println!("\n{tl}{lines}{tr}\n{message}\n{bl}{lines}{br}\n");
}
Ok(()) Ok(())
} }
#[instrument(skip(reqwest, writer), level = "trace")] pub async fn download_github_release(
pub async fn download_github_release<W: AsyncWrite + Unpin>(
reqwest: &reqwest::Client, reqwest: &reqwest::Client,
version: &Version, version: &Version,
mut writer: W, ) -> anyhow::Result<Vec<u8>> {
) -> anyhow::Result<()> {
let (owner, repo) = get_repo(); let (owner, repo) = get_repo();
let release = reqwest let release = reqwest
@ -236,22 +202,19 @@ pub async fn download_github_release<W: AsyncWrite + Unpin>(
.context("archive has no entry")? .context("archive has no entry")?
.context("failed to get first archive entry")?; .context("failed to get first archive entry")?;
tokio::io::copy(&mut entry, &mut writer) let mut result = Vec::new();
entry
.read_to_end(&mut result)
.await .await
.context("failed to write archive entry to file") .context("failed to read archive entry bytes")?;
.map(|_| ())
Ok(result)
} }
#[derive(Debug)]
pub enum TagInfo {
Complete(Version),
Incomplete(Version),
}
#[instrument(skip(reqwest), level = "trace")]
pub async fn get_or_download_version( pub async fn get_or_download_version(
reqwest: &reqwest::Client, reqwest: &reqwest::Client,
tag: &TagInfo, version: &Version,
always_give_path: bool, always_give_path: bool,
) -> anyhow::Result<Option<PathBuf>> { ) -> anyhow::Result<Option<PathBuf>> {
let path = home_dir()?.join("versions"); let path = home_dir()?.join("versions");
@ -259,23 +222,11 @@ pub async fn get_or_download_version(
.await .await
.context("failed to create versions directory")?; .context("failed to create versions directory")?;
let version = match tag { let path = path.join(format!("{version}{}", std::env::consts::EXE_SUFFIX));
TagInfo::Complete(version) => version,
// don't fetch the version since it could be cached
TagInfo::Incomplete(version) => version,
};
let path = path.join(format!(
"{}{}",
no_build_metadata(version),
std::env::consts::EXE_SUFFIX
));
let is_requested_version = !always_give_path && *version == current_version(); let is_requested_version = !always_give_path && *version == current_version();
if path.exists() { if path.exists() {
tracing::debug!("version already exists");
return Ok(if is_requested_version { return Ok(if is_requested_version {
None None
} else { } else {
@ -284,29 +235,14 @@ pub async fn get_or_download_version(
} }
if is_requested_version { if is_requested_version {
tracing::debug!("copying current executable to version directory");
fs::copy(current_exe()?, &path) fs::copy(current_exe()?, &path)
.await .await
.context("failed to copy current executable to version directory")?; .context("failed to copy current executable to version directory")?;
} else { } else {
let version = match tag { let bytes = download_github_release(reqwest, version).await?;
TagInfo::Complete(version) => version.clone(), fs::write(&path, bytes)
TagInfo::Incomplete(version) => {
get_remote_version(reqwest, VersionType::Specific(version.clone()))
.await .await
.context("failed to get remote version")? .context("failed to write downloaded version file")?;
}
};
tracing::debug!("downloading version");
download_github_release(
reqwest,
&version,
fs::File::create(&path)
.await
.context("failed to create version file")?,
)
.await?;
} }
make_executable(&path) make_executable(&path)
@ -320,7 +256,6 @@ pub async fn get_or_download_version(
}) })
} }
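get_or_download_version keys its on-disk cache on the version string plus the platform's executable suffix inside a `versions` directory. A small sketch of that path construction, with the home directory passed in directly instead of resolved through the CLI's `home_dir()` helper:

```rust
use semver::Version;
use std::path::{Path, PathBuf};

fn cached_binary_path(home: &Path, version: &Version) -> PathBuf {
    home.join("versions")
        .join(format!("{version}{}", std::env::consts::EXE_SUFFIX))
}

fn main() {
    let version = Version::parse("0.5.0-rc.16").unwrap();
    let path = cached_binary_path(Path::new("/home/user/.pesde"), &version);
    // e.g. /home/user/.pesde/versions/0.5.0-rc.16 (plus ".exe" on Windows)
    println!("{}", path.display());
}
```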
#[instrument(level = "trace")]
pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> { pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
let bin_exe_path = bin_dir().await?.join(format!( let bin_exe_path = bin_dir().await?.join(format!(
"{}{}", "{}{}",

View file

@ -13,7 +13,6 @@ use std::{
collections::HashSet, collections::HashSet,
sync::{Arc, Mutex}, sync::{Arc, Mutex},
}; };
use tracing::{instrument, Instrument};
type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>; type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>;
@ -24,7 +23,6 @@ pub(crate) type MultithreadDownloadJob = (
impl Project { impl Project {
/// Downloads a graph of dependencies /// Downloads a graph of dependencies
#[instrument(skip(self, graph, refreshed_sources, reqwest), level = "debug")]
pub async fn download_graph( pub async fn download_graph(
&self, &self,
graph: &DependencyGraph, graph: &DependencyGraph,
@ -71,20 +69,13 @@ impl Project {
let version_id = version_id.clone(); let version_id = version_id.clone();
let node = node.clone(); let node = node.clone();
let span = tracing::info_span!(
"download",
name = name.to_string(),
version_id = version_id.to_string()
);
let project = project.clone(); let project = project.clone();
let reqwest = reqwest.clone(); let reqwest = reqwest.clone();
let downloaded_graph = downloaded_graph.clone(); let downloaded_graph = downloaded_graph.clone();
let package_dir = self.package_dir().to_path_buf(); let package_dir = self.package_dir().to_path_buf();
tokio::spawn( tokio::spawn(async move {
async move {
let source = node.pkg_ref.source(); let source = node.pkg_ref.source();
let container_folder = node.container_folder( let container_folder = node.container_folder(
@ -107,7 +98,7 @@ impl Project {
let project = project.clone(); let project = project.clone();
tracing::debug!("downloading"); log::debug!("downloading {name}@{version_id}");
let (fs, target) = let (fs, target) =
match source.download(&node.pkg_ref, &project, &reqwest).await { match source.download(&node.pkg_ref, &project, &reqwest).await {
@ -118,7 +109,7 @@ impl Project {
} }
}; };
tracing::debug!("downloaded"); log::debug!("downloaded {name}@{version_id}");
if write { if write {
if !prod || node.resolved_ty != DependencyType::Dev { if !prod || node.resolved_ty != DependencyType::Dev {
@ -132,9 +123,7 @@ impl Project {
} }
}; };
} else { } else {
tracing::debug!( log::debug!("skipping writing {name}@{version_id} to disk, dev dependency in prod mode");
"skipping write to disk, dev dependency in prod mode"
);
} }
} }
@ -149,9 +138,7 @@ impl Project {
} }
tx.send(Ok(display_name)).await.unwrap(); tx.send(Ok(display_name)).await.unwrap();
} });
.instrument(span),
);
} }
} }
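download_graph spawns one task per package and reports progress back through an mpsc channel, which is what the receiver handed to callers drains. A stripped-down sketch of that fan-out/fan-in shape; the names and the fake download step are placeholders:

```rust
use tokio::sync::mpsc;

async fn download_all(packages: Vec<String>) {
    let (tx, mut rx) = mpsc::channel::<Result<String, String>>(16);

    for name in packages {
        let tx = tx.clone();
        tokio::spawn(async move {
            // ... perform the actual download here ...
            tx.send(Ok(name)).await.ok();
        });
    }
    // Drop the original sender so the receiver ends once all tasks finish.
    drop(tx);

    while let Some(result) = rx.recv().await {
        match result {
            Ok(name) => println!("downloaded {name}"),
            Err(err) => eprintln!("download failed: {err}"),
        }
    }
}
```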

View file

@ -11,7 +11,6 @@ use std::{
sync::{Arc, Mutex as StdMutex}, sync::{Arc, Mutex as StdMutex},
}; };
use tokio::sync::Mutex; use tokio::sync::Mutex;
use tracing::{instrument, Instrument};
/// Filters a graph to only include production dependencies, if `prod` is `true` /// Filters a graph to only include production dependencies, if `prod` is `true`
pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph { pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph {
@ -34,16 +33,8 @@ pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph {
.collect() .collect()
} }
/// Receiver for dependencies downloaded and linked
pub type DownloadAndLinkReceiver =
tokio::sync::mpsc::Receiver<Result<String, crate::download::errors::DownloadGraphError>>;
impl Project { impl Project {
/// Downloads a graph of dependencies and links them in the correct order /// Downloads a graph of dependencies and links them in the correct order
#[instrument(
skip(self, graph, refreshed_sources, reqwest, pesde_cb),
level = "debug"
)]
pub async fn download_and_link< pub async fn download_and_link<
F: FnOnce(&Arc<DownloadedGraph>) -> R + Send + 'static, F: FnOnce(&Arc<DownloadedGraph>) -> R + Send + 'static,
R: Future<Output = Result<(), E>> + Send, R: Future<Output = Result<(), E>> + Send,
@ -58,7 +49,9 @@ impl Project {
pesde_cb: F, pesde_cb: F,
) -> Result< ) -> Result<
( (
DownloadAndLinkReceiver, tokio::sync::mpsc::Receiver<
Result<String, crate::download::errors::DownloadGraphError>,
>,
impl Future<Output = Result<DownloadedGraph, errors::DownloadAndLinkError<E>>>, impl Future<Output = Result<DownloadedGraph, errors::DownloadAndLinkError<E>>>,
), ),
errors::DownloadAndLinkError<E>, errors::DownloadAndLinkError<E>,
@ -85,7 +78,6 @@ impl Project {
// step 1. download pesde dependencies // step 1. download pesde dependencies
let (mut pesde_rx, pesde_graph) = this let (mut pesde_rx, pesde_graph) = this
.download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, false) .download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, false)
.instrument(tracing::debug_span!("download (pesde)"))
.await?; .await?;
while let Some(result) = pesde_rx.recv().await { while let Some(result) = pesde_rx.recv().await {
@ -97,7 +89,6 @@ impl Project {
// step 2. link pesde dependencies. do so without types // step 2. link pesde dependencies. do so without types
if write { if write {
this.link_dependencies(&filter_graph(&pesde_graph, prod), false) this.link_dependencies(&filter_graph(&pesde_graph, prod), false)
.instrument(tracing::debug_span!("link (pesde)"))
.await?; .await?;
} }
@ -112,7 +103,6 @@ impl Project {
// step 3. download wally dependencies // step 3. download wally dependencies
let (mut wally_rx, wally_graph) = this let (mut wally_rx, wally_graph) = this
.download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, true) .download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, true)
.instrument(tracing::debug_span!("download (wally)"))
.await?; .await?;
while let Some(result) = wally_rx.recv().await { while let Some(result) = wally_rx.recv().await {
@ -142,7 +132,6 @@ impl Project {
// step 4. link ALL dependencies. do so with types // step 4. link ALL dependencies. do so with types
if write { if write {
this.link_dependencies(&filter_graph(&graph, prod), true) this.link_dependencies(&filter_graph(&graph, prod), true)
.instrument(tracing::debug_span!("link (all)"))
.await?; .await?;
} }
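The step comments in download_and_link encode a strict ordering: pesde packages are fetched and linked without types first (so any scripts they ship are on disk), then Wally packages are fetched, and only then is the full graph linked with types. A schematic sketch of that sequence using stub steps:

```rust
async fn download_pesde() {}
async fn download_wally() {}
async fn link(_with_types: bool) {}

async fn download_and_link_order() {
    download_pesde().await; // step 1: pesde dependencies
    link(false).await;      // step 2: link them, no types yet
    download_wally().await; // step 3: wally dependencies
    link(true).await;       // step 4: link everything, with types
}
```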

View file

@ -14,10 +14,8 @@ use futures::{future::try_join_all, Stream};
use gix::sec::identity::Account; use gix::sec::identity::Account;
use std::{ use std::{
collections::{HashMap, HashSet}, collections::{HashMap, HashSet},
fmt::Debug,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use tracing::instrument;
use wax::Pattern; use wax::Pattern;
/// Downloading packages /// Downloading packages
@ -151,35 +149,29 @@ impl Project {
} }
/// Read the manifest file /// Read the manifest file
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> { pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?; let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
Ok(string) Ok(string)
} }
// TODO: cache the manifest
/// Deserialize the manifest file /// Deserialize the manifest file
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub async fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> { pub async fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?; let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
Ok(toml::from_str(&string)?) Ok(toml::from_str(&string)?)
} }
/// Write the manifest file /// Write the manifest file
#[instrument(skip(self, manifest), level = "debug")]
pub async fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> { pub async fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref()).await fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref()).await
} }
/// Deserialize the lockfile /// Deserialize the lockfile
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> { pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
let string = fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME)).await?; let string = fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME)).await?;
Ok(toml::from_str(&string)?) Ok(toml::from_str(&string)?)
} }
/// Write the lockfile /// Write the lockfile
#[instrument(skip(self, lockfile), level = "debug")]
pub async fn write_lockfile( pub async fn write_lockfile(
&self, &self,
lockfile: Lockfile, lockfile: Lockfile,
@ -190,8 +182,7 @@ impl Project {
} }
/// Get the workspace members /// Get the workspace members
#[instrument(skip(self), level = "debug")] pub async fn workspace_members<P: AsRef<Path>>(
pub async fn workspace_members<P: AsRef<Path> + Debug>(
&self, &self,
dir: P, dir: P,
can_ref_self: bool, can_ref_self: bool,
@ -231,16 +222,7 @@ impl Project {
} }
/// Gets all matching paths in a directory /// Gets all matching paths in a directory
#[deprecated( pub async fn matching_globs_old_behaviour<'a, P: AsRef<Path>, I: IntoIterator<Item = &'a str>>(
since = "0.5.0-rc.13",
note = "use `matching_globs` instead, which does not have the old behaviour of including whole directories by their name (`src` instead of `src/**`)"
)]
#[instrument(ret, level = "trace")]
pub async fn matching_globs_old_behaviour<
'a,
P: AsRef<Path> + Debug,
I: IntoIterator<Item = &'a str> + Debug,
>(
dir: P, dir: P,
globs: I, globs: I,
relative: bool, relative: bool,
@ -288,7 +270,7 @@ pub async fn matching_globs_old_behaviour<
is_entire_dir_included || is_filename_match, is_entire_dir_included || is_filename_match,
)); ));
if is_filename_match { if is_filename_match {
tracing::warn!("directory name usage found for {}. this is deprecated and will be removed in the future", path.display()); log::warn!("directory name usage found for {}. this is deprecated and will be removed in the future", path.display());
} }
} }
@ -311,8 +293,7 @@ pub async fn matching_globs_old_behaviour<
} }
/// Gets all matching paths in a directory /// Gets all matching paths in a directory
#[instrument(ret, level = "trace")] pub async fn matching_globs<'a, P: AsRef<Path>, I: IntoIterator<Item = &'a str>>(
pub async fn matching_globs<'a, P: AsRef<Path> + Debug, I: IntoIterator<Item = &'a str> + Debug>(
dir: P, dir: P,
globs: I, globs: I,
relative: bool, relative: bool,

View file

@ -117,10 +117,10 @@ pub fn get_lib_require_path(
) -> Result<String, errors::GetLibRequirePath> { ) -> Result<String, errors::GetLibRequirePath> {
let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap(); let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
let path = if use_new_structure { let path = if use_new_structure {
tracing::debug!("using new structure for require path with {lib_file:?}"); log::debug!("using new structure for require path with {:?}", lib_file);
lib_file.to_path(path) lib_file.to_path(path)
} else { } else {
tracing::debug!("using old structure for require path with {lib_file:?}"); log::debug!("using old structure for require path with {:?}", lib_file);
path path
}; };
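get_lib_require_path builds the require path by diffing the destination directory against the base directory with the `pathdiff` crate. A small sketch of that call; the paths are illustrative:

```rust
use std::path::Path;

fn main() {
    // pathdiff::diff_paths(target, base) yields the relative path from base to target.
    let relative = pathdiff::diff_paths(
        Path::new("packages/scope_name/lib"),
        Path::new("packages"),
    );
    assert_eq!(relative, Some("scope_name/lib".into()));
}
```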

View file

@ -20,7 +20,6 @@ use std::{
sync::Arc, sync::Arc,
}; };
use tokio::task::spawn_blocking; use tokio::task::spawn_blocking;
use tracing::{instrument, Instrument};
/// Generates linking modules for a project /// Generates linking modules for a project
pub mod generator; pub mod generator;
@ -45,7 +44,6 @@ async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std:
impl Project { impl Project {
/// Links the dependencies of the project /// Links the dependencies of the project
#[instrument(skip(self, graph), level = "debug")]
pub async fn link_dependencies( pub async fn link_dependencies(
&self, &self,
graph: &DownloadedGraph, graph: &DownloadedGraph,
@ -57,7 +55,7 @@ impl Project {
// step 1. link all non-wally packages (and their dependencies) temporarily without types // step 1. link all non-wally packages (and their dependencies) temporarily without types
// we do this separately to allow the required tools for the scripts to be installed // we do this separately to allow the required tools for the scripts to be installed
self.link(graph, &manifest, &Arc::new(Default::default()), false) self.link(graph, &manifest, &Arc::new(Default::default()))
.await?; .await?;
if !with_types { if !with_types {
@ -112,7 +110,7 @@ impl Project {
} }
}; };
tracing::debug!("contains {} exported types", types.len()); log::debug!("{name}@{version_id} has {} exported types", types.len());
types types
} else { } else {
@ -124,8 +122,8 @@ impl Project {
.and_then(|t| t.build_files()) .and_then(|t| t.build_files())
{ {
let Some(script_path) = roblox_sync_config_gen_script else { let Some(script_path) = roblox_sync_config_gen_script else {
tracing::warn!("not having a `{}` script in the manifest might cause issues with Roblox linking", ScriptName::RobloxSyncConfigGenerator); log::warn!("not having a `{}` script in the manifest might cause issues with Roblox linking", ScriptName::RobloxSyncConfigGenerator);
return Ok((version_id, types)); return Ok((version_id, vec![]));
}; };
execute_script( execute_script(
@ -145,7 +143,7 @@ impl Project {
} }
Ok((version_id, types)) Ok((version_id, types))
}.instrument(tracing::debug_span!("extract types", name = name.to_string(), version_id = version_id.to_string())))) }))
.await? .await?
.into_iter() .into_iter()
.collect::<HashMap<_, _>>(), .collect::<HashMap<_, _>>(),
@ -156,8 +154,7 @@ impl Project {
.collect::<HashMap<_, _>>(); .collect::<HashMap<_, _>>();
// step 3. link all packages (and their dependencies), this time with types // step 3. link all packages (and their dependencies), this time with types
self.link(graph, &manifest, &Arc::new(package_types), true) self.link(graph, &manifest, &Arc::new(package_types)).await
.await
} }
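On the left-hand side, link_dependencies runs `link` twice: an incomplete first pass that silently skips dependencies missing from the graph, and a final pass where a missing dependency is a hard error. A tiny sketch of that `is_complete` switch, with the types simplified:

```rust
fn resolve_dependency(
    found: Option<&str>,
    is_complete: bool,
) -> Result<Option<&str>, String> {
    match found {
        Some(dep) => Ok(Some(dep)),
        None if is_complete => Err("dependency not found".to_string()),
        None => Ok(None), // tolerated during the first, incomplete pass
    }
}
```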
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
@ -246,7 +243,6 @@ impl Project {
graph: &DownloadedGraph, graph: &DownloadedGraph,
manifest: &Arc<Manifest>, manifest: &Arc<Manifest>,
package_types: &Arc<HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>>, package_types: &Arc<HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>>,
is_complete: bool,
) -> Result<(), errors::LinkingError> { ) -> Result<(), errors::LinkingError> {
try_join_all(graph.iter().flat_map(|(name, versions)| { try_join_all(graph.iter().flat_map(|(name, versions)| {
versions.iter().map(|(version_id, node)| { versions.iter().map(|(version_id, node)| {
@ -254,12 +250,6 @@ impl Project {
let manifest = manifest.clone(); let manifest = manifest.clone();
let package_types = package_types.clone(); let package_types = package_types.clone();
let span = tracing::info_span!(
"link",
name = name.to_string(),
version_id = version_id.to_string()
);
async move { async move {
let (node_container_folder, node_packages_folder) = { let (node_container_folder, node_packages_folder) = {
let base_folder = create_and_canonicalize( let base_folder = create_and_canonicalize(
@ -301,14 +291,10 @@ impl Project {
.get(dependency_name) .get(dependency_name)
.and_then(|v| v.get(dependency_version_id)) .and_then(|v| v.get(dependency_version_id))
else { else {
if is_complete {
return Err(errors::LinkingError::DependencyNotFound( return Err(errors::LinkingError::DependencyNotFound(
format!("{dependency_name}@{dependency_version_id}"), dependency_name.to_string(),
format!("{name}@{version_id}"), dependency_version_id.to_string(),
)); ));
}
continue;
}; };
let base_folder = create_and_canonicalize( let base_folder = create_and_canonicalize(
@ -352,7 +338,6 @@ impl Project {
Ok(()) Ok(())
} }
.instrument(span)
}) })
})) }))
.await .await
@ -377,7 +362,7 @@ pub mod errors {
Io(#[from] std::io::Error), Io(#[from] std::io::Error),
/// A dependency was not found /// A dependency was not found
#[error("dependency `{0}` of `{1}` not found")] #[error("dependency not found: {0}@{1}")]
DependencyNotFound(String, String), DependencyNotFound(String, String),
/// The library file was not found /// The library file was not found

View file

@ -14,7 +14,7 @@ use relative_path::RelativePathBuf;
use semver::Version; use semver::Version;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{ use std::{
collections::BTreeMap, collections::{btree_map::Entry, BTreeMap},
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
@ -32,9 +32,6 @@ pub struct DependencyGraphNode {
pub dependencies: BTreeMap<PackageNames, (VersionId, String)>, pub dependencies: BTreeMap<PackageNames, (VersionId, String)>,
/// The resolved (transformed, for example Peer -> Standard) type of the dependency /// The resolved (transformed, for example Peer -> Standard) type of the dependency
pub resolved_ty: DependencyType, pub resolved_ty: DependencyType,
/// Whether the resolved type should be Peer if this isn't depended on
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
pub is_peer: bool,
/// The package reference /// The package reference
pub pkg_ref: PackageRefs, pub pkg_ref: PackageRefs,
} }
@ -77,6 +74,45 @@ impl DependencyGraphNode {
/// A graph of `DependencyGraphNode`s /// A graph of `DependencyGraphNode`s
pub type DependencyGraph = Graph<DependencyGraphNode>; pub type DependencyGraph = Graph<DependencyGraphNode>;
pub(crate) fn insert_node(
graph: &mut DependencyGraph,
name: PackageNames,
version: VersionId,
mut node: DependencyGraphNode,
is_top_level: bool,
) {
if !is_top_level && node.direct.take().is_some() {
log::debug!(
"tried to insert {name}@{version} as direct dependency from a non top-level context",
);
}
match graph
.entry(name.clone())
.or_default()
.entry(version.clone())
{
Entry::Vacant(entry) => {
entry.insert(node);
}
Entry::Occupied(existing) => {
let current_node = existing.into_mut();
match (&current_node.direct, &node.direct) {
(Some(_), Some(_)) => {
log::warn!("duplicate direct dependency for {name}@{version}");
}
(None, Some(_)) => {
current_node.direct = node.direct;
}
(_, _) => {}
}
}
}
}
/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target` /// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DownloadedDependencyGraphNode { pub struct DownloadedDependencyGraphNode {

View file

@ -1,20 +1,17 @@
#[cfg(feature = "version-management")] #[cfg(feature = "version-management")]
use crate::cli::version::{check_for_updates, get_or_download_version, TagInfo}; use crate::cli::version::{check_for_updates, get_or_download_version};
use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR}; use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
use anyhow::Context; use anyhow::Context;
use clap::{builder::styling::AnsiColor, Parser}; use clap::{builder::styling::AnsiColor, Parser};
use fs_err::tokio as fs; use fs_err::tokio as fs;
use indicatif::MultiProgress;
use indicatif_log_bridge::LogWrapper;
use pesde::{matching_globs, AuthConfig, Project, MANIFEST_FILE_NAME}; use pesde::{matching_globs, AuthConfig, Project, MANIFEST_FILE_NAME};
use std::{ use std::{
collections::HashSet, collections::HashSet,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
use tracing::instrument;
use tracing_indicatif::{filter::IndicatifFilter, IndicatifLayer};
use tracing_subscriber::{
filter::LevelFilter, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer,
};
mod cli; mod cli;
pub mod util; pub mod util;
@ -41,7 +38,6 @@ struct Cli {
subcommand: cli::commands::Subcommand, subcommand: cli::commands::Subcommand,
} }
#[instrument(level = "trace")]
async fn get_linkable_dir(path: &Path) -> PathBuf { async fn get_linkable_dir(path: &Path) -> PathBuf {
let mut curr_path = PathBuf::new(); let mut curr_path = PathBuf::new();
let file_to_try = NamedTempFile::new_in(path).expect("failed to create temporary file"); let file_to_try = NamedTempFile::new_in(path).expect("failed to create temporary file");
@ -72,7 +68,7 @@ async fn get_linkable_dir(path: &Path) -> PathBuf {
if fs::hard_link(file_to_try.path(), &try_path).await.is_ok() { if fs::hard_link(file_to_try.path(), &try_path).await.is_ok() {
if let Err(err) = fs::remove_file(&try_path).await { if let Err(err) = fs::remove_file(&try_path).await {
tracing::warn!( log::warn!(
"failed to remove temporary file at {}: {err}", "failed to remove temporary file at {}: {err}",
try_path.display() try_path.display()
); );
@ -133,39 +129,6 @@ async fn run() -> anyhow::Result<()> {
std::process::exit(status.code().unwrap()); std::process::exit(status.code().unwrap());
} }
let indicatif_layer = IndicatifLayer::new().with_filter(IndicatifFilter::new(false));
let tracing_env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy()
.add_directive("reqwest=info".parse().unwrap())
.add_directive("rustls=info".parse().unwrap())
.add_directive("tokio_util=info".parse().unwrap())
.add_directive("goblin=info".parse().unwrap())
.add_directive("tower=info".parse().unwrap())
.add_directive("hyper=info".parse().unwrap())
.add_directive("h2=info".parse().unwrap());
let fmt_layer =
tracing_subscriber::fmt::layer().with_writer(indicatif_layer.inner().get_stderr_writer());
#[cfg(debug_assertions)]
let fmt_layer = fmt_layer.with_timer(tracing_subscriber::fmt::time::uptime());
#[cfg(not(debug_assertions))]
let fmt_layer = fmt_layer
.pretty()
.with_timer(())
.with_line_number(false)
.with_file(false)
.with_target(false);
tracing_subscriber::registry()
.with(tracing_env_filter)
.with(fmt_layer)
.with(indicatif_layer)
.init();
let (project_root_dir, project_workspace_dir) = 'finder: { let (project_root_dir, project_workspace_dir) = 'finder: {
let mut current_path = Some(cwd.clone()); let mut current_path = Some(cwd.clone());
let mut project_root = None::<PathBuf>; let mut project_root = None::<PathBuf>;
@ -228,13 +191,16 @@ async fn run() -> anyhow::Result<()> {
(project_root.unwrap_or_else(|| cwd.clone()), workspace_dir) (project_root.unwrap_or_else(|| cwd.clone()), workspace_dir)
}; };
tracing::trace!( let multi = {
"project root: {}\nworkspace root: {}", let logger = pretty_env_logger::formatted_builder()
project_root_dir.display(), .parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"))
project_workspace_dir .build();
.as_ref() let multi = MultiProgress::new();
.map_or("none".to_string(), |p| p.display().to_string())
); LogWrapper::new(multi.clone(), logger).try_init().unwrap();
multi
};
let home_dir = home_dir()?; let home_dir = home_dir()?;
let data_dir = home_dir.join("data"); let data_dir = home_dir.join("data");
@ -251,7 +217,7 @@ async fn run() -> anyhow::Result<()> {
} }
.join("cas"); .join("cas");
tracing::debug!("using cas dir in {}", cas_dir.display()); log::debug!("using cas dir in {}", cas_dir.display());
let project = Project::new( let project = Project::new(
project_root_dir, project_root_dir,
@ -290,7 +256,7 @@ async fn run() -> anyhow::Result<()> {
.and_then(|manifest| manifest.pesde_version); .and_then(|manifest| manifest.pesde_version);
let exe_path = if let Some(version) = target_version { let exe_path = if let Some(version) = target_version {
get_or_download_version(&reqwest, &TagInfo::Incomplete(version), false).await? get_or_download_version(&reqwest, &version, false).await?
} else { } else {
None None
}; };
@ -312,7 +278,7 @@ async fn run() -> anyhow::Result<()> {
let cli = Cli::parse(); let cli = Cli::parse();
cli.subcommand.run(project, reqwest).await cli.subcommand.run(project, multi, reqwest).await
} }
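The right-hand side initialises logging by wrapping a `pretty_env_logger` logger in `indicatif_log_bridge::LogWrapper`, so `log` output is routed through the `MultiProgress` instead of tearing through active progress bars. A sketch of that setup, lifted almost directly from the hunk above:

```rust
use indicatif::MultiProgress;
use indicatif_log_bridge::LogWrapper;

fn init_logging() -> MultiProgress {
    let logger = pretty_env_logger::formatted_builder()
        .parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"))
        .build();
    let multi = MultiProgress::new();

    // Route every log line through the MultiProgress so bars are redrawn cleanly.
    LogWrapper::new(multi.clone(), logger)
        .try_init()
        .expect("a global logger was already installed");

    multi
}
```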
#[tokio::main] #[tokio::main]

View file

@ -1,13 +1,13 @@
use relative_path::RelativePathBuf;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap};
use crate::{ use crate::{
manifest::{overrides::OverrideKey, target::Target}, manifest::{overrides::OverrideKey, target::Target},
names::PackageName, names::PackageName,
source::specifiers::DependencySpecifiers, source::specifiers::DependencySpecifiers,
}; };
use relative_path::RelativePathBuf;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap};
use tracing::instrument;
/// Overrides /// Overrides
pub mod overrides; pub mod overrides;
@ -107,7 +107,6 @@ pub enum DependencyType {
impl Manifest { impl Manifest {
/// Get all dependencies from the manifest /// Get all dependencies from the manifest
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub fn all_dependencies( pub fn all_dependencies(
&self, &self,
) -> Result< ) -> Result<

View file

@ -35,16 +35,8 @@ impl FromStr for PackageName {
.ok_or(Self::Err::InvalidFormat(s.to_string()))?; .ok_or(Self::Err::InvalidFormat(s.to_string()))?;
for (reason, part) in [(ErrorReason::Scope, scope), (ErrorReason::Name, name)] { for (reason, part) in [(ErrorReason::Scope, scope), (ErrorReason::Name, name)] {
let min_len = match reason { if part.len() < 3 || part.len() > 32 {
ErrorReason::Scope => 3, return Err(Self::Err::InvalidLength(reason, part.to_string()));
ErrorReason::Name => 1,
};
if !(min_len..=32).contains(&part.len()) {
return Err(match reason {
ErrorReason::Scope => Self::Err::InvalidScopeLength(part.to_string()),
ErrorReason::Name => Self::Err::InvalidNameLength(part.to_string()),
});
} }
if part.chars().all(|c| c.is_ascii_digit()) { if part.chars().all(|c| c.is_ascii_digit()) {
@ -239,13 +231,9 @@ pub mod errors {
#[error("package {0} `{1}` starts or ends with an underscore")] #[error("package {0} `{1}` starts or ends with an underscore")]
PrePostfixUnderscore(ErrorReason, String), PrePostfixUnderscore(ErrorReason, String),
/// The package name's scope part is not within 3-32 characters long /// The package name is not within 3-32 characters long
#[error("package scope `{0}` is not within 3-32 characters long")] #[error("package {0} `{1}` is not within 3-32 characters long")]
InvalidScopeLength(String), InvalidLength(ErrorReason, String),
/// The package name's name part is not within 1-32 characters long
#[error("package name `{0}` is not within 1-32 characters long")]
InvalidNameLength(String),
} }
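On the left-hand side the length rule is split per part: scopes must be 3–32 characters, names only 1–32, each with its own error variant. A minimal sketch of that check; the helper is hypothetical, not the crate's API:

```rust
fn part_length_ok(part: &str, is_scope: bool) -> bool {
    let min_len = if is_scope { 3 } else { 1 };
    (min_len..=32).contains(&part.len())
}

fn main() {
    assert!(part_length_ok("ab", false)); // 2-character name is accepted
    assert!(!part_length_ok("ab", true)); // 2-character scope is rejected
}
```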
/// Errors that can occur when working with Wally package names /// Errors that can occur when working with Wally package names

View file

@ -3,7 +3,6 @@ use fs_err::tokio as fs;
use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature}; use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature};
use relative_path::RelativePathBuf; use relative_path::RelativePathBuf;
use std::path::Path; use std::path::Path;
use tracing::instrument;
/// Set up a git repository for patches /// Set up a git repository for patches
pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> { pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> {
@ -70,7 +69,6 @@ pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {
impl Project { impl Project {
/// Apply patches to the project's dependencies /// Apply patches to the project's dependencies
#[instrument(skip(self, graph), level = "debug")]
pub async fn apply_patches( pub async fn apply_patches(
&self, &self,
graph: &DownloadedGraph, graph: &DownloadedGraph,
@ -99,7 +97,7 @@ impl Project {
.get(&name) .get(&name)
.and_then(|versions| versions.get(&version_id)) .and_then(|versions| versions.get(&version_id))
else { else {
tracing::warn!( log::warn!(
"patch for {name}@{version_id} not applied because it is not in the graph" "patch for {name}@{version_id} not applied because it is not in the graph"
); );
tx.send(Ok(format!("{name}@{version_id}"))).await.unwrap(); tx.send(Ok(format!("{name}@{version_id}"))).await.unwrap();
@ -116,7 +114,7 @@ impl Project {
); );
tokio::spawn(async move { tokio::spawn(async move {
tracing::debug!("applying patch to {name}@{version_id}"); log::debug!("applying patch to {name}@{version_id}");
let patch = match fs::read(&patch_path).await { let patch = match fs::read(&patch_path).await {
Ok(patch) => patch, Ok(patch) => patch,
@ -197,9 +195,7 @@ impl Project {
} }
} }
tracing::debug!( log::debug!("patch applied to {name}@{version_id}, removing .git directory");
"patch applied to {name}@{version_id}, removing .git directory"
);
if let Err(e) = fs::remove_dir_all(container_folder.join(".git")).await { if let Err(e) = fs::remove_dir_all(container_folder.join(".git")).await {
tx.send(Err(errors::ApplyPatchesError::DotGitRemove(e))) tx.send(Err(errors::ApplyPatchesError::DotGitRemove(e)))
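apply_patches reads each patch file and applies it to the dependency's container folder through git2. A condensed sketch of that buffer-based flow; the paths, error handling, and `ApplyLocation` choice are simplified for illustration:

```rust
use git2::{ApplyLocation, Diff, Repository};
use std::path::Path;

fn apply_patch(container: &Path, patch_bytes: &[u8]) -> Result<(), git2::Error> {
    let repo = Repository::open(container)?;
    let diff = Diff::from_buffer(patch_bytes)?;
    // Apply to the working tree; the code above removes the .git directory afterwards.
    repo.apply(&diff, ApplyLocation::WorkDir, None)?;
    Ok(())
}
```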

View file

@ -1,5 +1,5 @@
use crate::{ use crate::{
lockfile::{DependencyGraph, DependencyGraphNode}, lockfile::{insert_node, DependencyGraph, DependencyGraphNode},
manifest::DependencyType, manifest::DependencyType,
names::PackageNames, names::PackageNames,
source::{ source::{
@ -11,55 +11,10 @@ use crate::{
}, },
Project, DEFAULT_INDEX_NAME, Project, DEFAULT_INDEX_NAME,
}; };
use std::collections::{btree_map::Entry, HashMap, HashSet, VecDeque}; use std::collections::{HashMap, HashSet, VecDeque};
use tracing::{instrument, Instrument};
fn insert_node(
graph: &mut DependencyGraph,
name: PackageNames,
version: VersionId,
mut node: DependencyGraphNode,
is_top_level: bool,
) {
if !is_top_level && node.direct.take().is_some() {
tracing::debug!(
"tried to insert {name}@{version} as direct dependency from a non top-level context",
);
}
match graph
.entry(name.clone())
.or_default()
.entry(version.clone())
{
Entry::Vacant(entry) => {
entry.insert(node);
}
Entry::Occupied(existing) => {
let current_node = existing.into_mut();
match (&current_node.direct, &node.direct) {
(Some(_), Some(_)) => {
tracing::warn!("duplicate direct dependency for {name}@{version}");
}
(None, Some(_)) => {
current_node.direct = node.direct;
}
(_, _) => {}
}
}
}
}
impl Project { impl Project {
/// Create a dependency graph from the project's manifest /// Create a dependency graph from the project's manifest
#[instrument(
skip(self, previous_graph, refreshed_sources),
ret(level = "trace"),
level = "debug"
)]
pub async fn dependency_graph( pub async fn dependency_graph(
&self, &self,
previous_graph: Option<&DependencyGraph>, previous_graph: Option<&DependencyGraph>,
@ -84,7 +39,7 @@ impl Project {
if let Some(previous_graph) = previous_graph { if let Some(previous_graph) = previous_graph {
for (name, versions) in previous_graph { for (name, versions) in previous_graph {
for (version, node) in versions { for (version, node) in versions {
let Some((old_alias, specifier, source_ty)) = &node.direct else { let Some((_, specifier, source_ty)) = &node.direct else {
// this is not a direct dependency, will be added if it's still being used later // this is not a direct dependency, will be added if it's still being used later
continue; continue;
}; };
@ -96,16 +51,13 @@ impl Project {
let Some(alias) = all_specifiers.remove(&(specifier.clone(), *source_ty)) let Some(alias) = all_specifiers.remove(&(specifier.clone(), *source_ty))
else { else {
tracing::debug!( log::debug!(
"dependency {name}@{version} (old alias {old_alias}) from old dependency graph is no longer in the manifest", "dependency {name}@{version} from old dependency graph is no longer in the manifest",
); );
continue; continue;
}; };
let span = tracing::info_span!("resolve from old graph", alias); log::debug!("resolved {}@{} from old dependency graph", name, version);
let _guard = span.enter();
tracing::debug!("resolved {}@{} from old dependency graph", name, version);
insert_node( insert_node(
&mut graph, &mut graph,
name.clone(), name.clone(),
@ -120,24 +72,22 @@ impl Project {
let mut queue = node let mut queue = node
.dependencies .dependencies
.iter() .iter()
.map(|(name, (version, dep_alias))| { .map(|(name, (version, _))| (name, version, 0usize))
(
name,
version,
vec![alias.to_string(), dep_alias.to_string()],
)
})
.collect::<VecDeque<_>>(); .collect::<VecDeque<_>>();
while let Some((dep_name, dep_version, path)) = queue.pop_front() { while let Some((dep_name, dep_version, depth)) = queue.pop_front() {
let inner_span =
tracing::info_span!("resolve dependency", path = path.join(">"));
let _inner_guard = inner_span.enter();
if let Some(dep_node) = previous_graph if let Some(dep_node) = previous_graph
.get(dep_name) .get(dep_name)
.and_then(|v| v.get(dep_version)) .and_then(|v| v.get(dep_version))
{ {
tracing::debug!("resolved sub-dependency {dep_name}@{dep_version}"); log::debug!(
"{}resolved dependency {}@{} from {}@{}",
"\t".repeat(depth),
dep_name,
dep_version,
name,
version
);
insert_node( insert_node(
&mut graph, &mut graph,
dep_name.clone(), dep_name.clone(),
@ -149,20 +99,15 @@ impl Project {
dep_node dep_node
.dependencies .dependencies
.iter() .iter()
.map(|(name, (version, alias))| { .map(|(name, (version, _))| (name, version, depth + 1))
(
name,
version,
path.iter()
.cloned()
.chain(std::iter::once(alias.to_string()))
.collect(),
)
})
.for_each(|dep| queue.push_back(dep)); .for_each(|dep| queue.push_back(dep));
} else { } else {
tracing::warn!( log::warn!(
"dependency {dep_name}@{dep_version} not found in previous graph" "dependency {}@{} from {}@{} not found in previous graph",
dep_name,
dep_version,
name,
version
); );
} }
} }
@ -185,11 +130,14 @@ impl Project {
.collect::<VecDeque<_>>(); .collect::<VecDeque<_>>();
while let Some((specifier, ty, dependant, path, overridden, target)) = queue.pop_front() { while let Some((specifier, ty, dependant, path, overridden, target)) = queue.pop_front() {
async {
let alias = path.last().unwrap().clone(); let alias = path.last().unwrap().clone();
let depth = path.len() - 1; let depth = path.len() - 1;
tracing::debug!("resolving {specifier} ({ty:?})"); log::debug!(
"{}resolving {specifier} from {}",
"\t".repeat(depth),
path.join(">")
);
let source = match &specifier { let source = match &specifier {
DependencySpecifiers::Pesde(specifier) => { DependencySpecifiers::Pesde(specifier) => {
let index_url = if !is_published_package && (depth == 0 || overridden) { let index_url = if !is_published_package && (depth == 0 || overridden) {
@ -295,8 +243,9 @@ impl Project {
.get_mut(&name) .get_mut(&name)
.and_then(|versions| versions.get_mut(&target_version_id)) .and_then(|versions| versions.get_mut(&target_version_id))
{ {
tracing::debug!( log::debug!(
"{}@{} already resolved", "{}{}@{} already resolved",
"\t".repeat(depth),
name, name,
target_version_id target_version_id
); );
@ -304,24 +253,23 @@ impl Project {
if std::mem::discriminant(&already_resolved.pkg_ref) if std::mem::discriminant(&already_resolved.pkg_ref)
!= std::mem::discriminant(pkg_ref) != std::mem::discriminant(pkg_ref)
{ {
tracing::warn!( log::warn!(
"resolved package {name}@{target_version_id} has a different source than previously resolved one, this may cause issues", "resolved package {name}@{target_version_id} has a different source than the previously resolved one at {}, this may cause issues",
path.join(">")
); );
} }
if already_resolved.resolved_ty == DependencyType::Peer { if already_resolved.resolved_ty == DependencyType::Peer
&& resolved_ty == DependencyType::Standard
{
already_resolved.resolved_ty = resolved_ty; already_resolved.resolved_ty = resolved_ty;
} }
if ty == DependencyType::Peer && depth == 0 {
already_resolved.is_peer = true;
}
if already_resolved.direct.is_none() && depth == 0 { if already_resolved.direct.is_none() && depth == 0 {
already_resolved.direct = Some((alias.clone(), specifier.clone(), ty)); already_resolved.direct = Some((alias.clone(), specifier.clone(), ty));
} }
return Ok(()); continue;
} }
let node = DependencyGraphNode { let node = DependencyGraphNode {
@ -333,11 +281,6 @@ impl Project {
pkg_ref: pkg_ref.clone(), pkg_ref: pkg_ref.clone(),
dependencies: Default::default(), dependencies: Default::default(),
resolved_ty, resolved_ty,
is_peer: if depth == 0 {
false
} else {
ty == DependencyType::Peer
},
}; };
insert_node( insert_node(
&mut graph, &mut graph,
@ -347,8 +290,9 @@ impl Project {
depth == 0, depth == 0,
); );
tracing::debug!( log::debug!(
"resolved {}@{} from new dependency graph", "{}resolved {}@{} from new dependency graph",
"\t".repeat(depth),
name, name,
target_version_id target_version_id
); );
@ -374,8 +318,9 @@ impl Project {
}); });
if overridden.is_some() { if overridden.is_some() {
tracing::debug!( log::debug!(
"overridden specifier found for {} ({dependency_spec})", "{}overridden specifier found for {} ({dependency_spec})",
"\t".repeat(depth),
path.iter() path.iter()
.map(|s| s.as_str()) .map(|s| s.as_str())
.chain(std::iter::once(dependency_alias.as_str())) .chain(std::iter::once(dependency_alias.as_str()))
@ -396,21 +341,12 @@ impl Project {
*target_version_id.target(), *target_version_id.target(),
)); ));
} }
Ok(())
}
.instrument(tracing::info_span!("resolve new/changed", path = path.join(">")))
.await?;
} }
for (name, versions) in &mut graph { for (name, versions) in &graph {
for (version_id, node) in versions { for (version_id, node) in versions {
if node.is_peer && node.direct.is_none() {
node.resolved_ty = DependencyType::Peer;
}
if node.resolved_ty == DependencyType::Peer { if node.resolved_ty == DependencyType::Peer {
tracing::warn!("peer dependency {name}@{version_id} was not resolved"); log::warn!("peer dependency {name}@{version_id} was not resolved");
} }
} }
} }
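Both sides of the resolver walk the old graph breadth-first with a `VecDeque`; the left tracks the full alias path for logging and override lookups, the right only tracks the depth. A reduced sketch of the path-tracking variant with placeholder types:

```rust
use std::collections::VecDeque;

fn walk(roots: Vec<(String, String, String)>) {
    // (name, version, path of aliases leading to this dependency)
    let mut queue: VecDeque<(String, String, Vec<String>)> = roots
        .into_iter()
        .map(|(name, version, alias)| (name, version, vec![alias]))
        .collect();

    while let Some((name, version, path)) = queue.pop_front() {
        println!("resolving {name}@{version} via {}", path.join(">"));
        // For each dependency of this node, push it with its alias appended:
        // queue.push_back((dep_name, dep_version,
        //     path.iter().cloned().chain([dep_alias]).collect()));
    }
}
```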

View file

@ -1,7 +1,7 @@
use crate::Project; use crate::Project;
use std::{ use std::{
ffi::OsStr, ffi::OsStr,
fmt::{Debug, Display, Formatter}, fmt::{Display, Formatter},
path::Path, path::Path,
process::Stdio, process::Stdio,
}; };
@ -9,7 +9,6 @@ use tokio::{
io::{AsyncBufReadExt, BufReader}, io::{AsyncBufReadExt, BufReader},
process::Command, process::Command,
}; };
use tracing::instrument;
/// Script names used by pesde /// Script names used by pesde
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
@ -31,8 +30,7 @@ impl Display for ScriptName {
} }
} }
#[instrument(skip(project), level = "debug")] pub(crate) async fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(
pub(crate) async fn execute_script<A: IntoIterator<Item = S> + Debug, S: AsRef<OsStr> + Debug>(
script_name: ScriptName, script_name: ScriptName,
script_path: &Path, script_path: &Path,
args: A, args: A,
@ -61,10 +59,10 @@ pub(crate) async fn execute_script<A: IntoIterator<Item = S> + Debug, S: AsRef<O
while let Some(line) = stderr.next_line().await.transpose() { while let Some(line) = stderr.next_line().await.transpose() {
match line { match line {
Ok(line) => { Ok(line) => {
tracing::error!("[{script}]: {line}"); log::error!("[{script}]: {line}");
} }
Err(e) => { Err(e) => {
tracing::error!("ERROR IN READING STDERR OF {script}: {e}"); log::error!("ERROR IN READING STDERR OF {script}: {e}");
break; break;
} }
} }
@ -80,11 +78,11 @@ pub(crate) async fn execute_script<A: IntoIterator<Item = S> + Debug, S: AsRef<O
stdout_str.push_str(&line); stdout_str.push_str(&line);
stdout_str.push('\n'); stdout_str.push('\n');
} else { } else {
tracing::info!("[{script_2}]: {line}"); log::info!("[{script_2}]: {line}");
} }
} }
Err(e) => { Err(e) => {
tracing::error!("ERROR IN READING STDOUT OF {script_2}: {e}"); log::error!("ERROR IN READING STDOUT OF {script_2}: {e}");
break; break;
} }
} }
@ -97,7 +95,7 @@ pub(crate) async fn execute_script<A: IntoIterator<Item = S> + Debug, S: AsRef<O
} }
} }
Err(e) if e.kind() == std::io::ErrorKind::NotFound => { Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
tracing::warn!("Lune could not be found in PATH: {e}"); log::warn!("Lune could not be found in PATH: {e}");
Ok(None) Ok(None)
} }
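execute_script pipes the child process's output and logs it line by line as it arrives. A reduced sketch of that streaming read; the `lune` invocation and log formatting are shown only as illustration:

```rust
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;

async fn run_and_log(script_path: &str) -> std::io::Result<()> {
    let mut child = Command::new("lune") // illustrative; the real command is built from the manifest
        .arg("run")
        .arg(script_path)
        .stdout(std::process::Stdio::piped())
        .spawn()?;

    let stdout = child.stdout.take().expect("stdout was piped above");
    let mut lines = BufReader::new(stdout).lines();
    while let Some(line) = lines.next_line().await? {
        println!("[{script_path}]: {line}");
    }

    child.wait().await?;
    Ok(())
}
```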

View file

@ -9,7 +9,6 @@ use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::{ use std::{
collections::BTreeMap, collections::BTreeMap,
fmt::Debug,
future::Future, future::Future,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
@ -18,7 +17,6 @@ use tokio::{
io::{AsyncReadExt, AsyncWriteExt}, io::{AsyncReadExt, AsyncWriteExt},
pin, pin,
}; };
use tracing::instrument;
/// A file system entry /// A file system entry
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -127,8 +125,7 @@ pub(crate) async fn store_in_cas<
impl PackageFS { impl PackageFS {
/// Write the package to the given destination /// Write the package to the given destination
#[instrument(skip(self), level = "debug")] pub async fn write_to<P: AsRef<Path>, Q: AsRef<Path>>(
pub async fn write_to<P: AsRef<Path> + Debug, Q: AsRef<Path> + Debug>(
&self, &self,
destination: P, destination: P,
cas_path: Q, cas_path: Q,
@ -214,8 +211,7 @@ impl PackageFS {
} }
/// Returns the contents of the file with the given hash /// Returns the contents of the file with the given hash
#[instrument(skip(self), ret(level = "trace"), level = "debug")] pub async fn read_file<P: AsRef<Path>, H: AsRef<str>>(
pub async fn read_file<P: AsRef<Path> + Debug, H: AsRef<str> + Debug>(
&self, &self,
file_hash: H, file_hash: H,
cas_path: P, cas_path: P,

View file

@ -27,7 +27,6 @@ use std::{
sync::Arc, sync::Arc,
}; };
use tokio::{sync::Mutex, task::spawn_blocking}; use tokio::{sync::Mutex, task::spawn_blocking};
use tracing::instrument;
/// The Git package reference /// The Git package reference
pub mod pkg_ref; pub mod pkg_ref;
@ -71,12 +70,10 @@ impl PackageSource for GitPackageSource {
type ResolveError = errors::ResolveError; type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError; type DownloadError = errors::DownloadError;
#[instrument(skip_all, level = "debug")]
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> { async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project).await GitBasedSource::refresh(self, project).await
} }
#[instrument(skip_all, level = "debug")]
async fn resolve( async fn resolve(
&self, &self,
specifier: &Self::Specifier, specifier: &Self::Specifier,
@ -332,7 +329,6 @@ impl PackageSource for GitPackageSource {
)) ))
} }
#[instrument(skip_all, level = "debug")]
async fn download( async fn download(
&self, &self,
pkg_ref: &Self::Ref, pkg_ref: &Self::Ref,
@ -347,7 +343,7 @@ impl PackageSource for GitPackageSource {
match fs::read_to_string(&index_file).await { match fs::read_to_string(&index_file).await {
Ok(s) => { Ok(s) => {
tracing::debug!( log::debug!(
"using cached index file for package {}#{}", "using cached index file for package {}#{}",
pkg_ref.repo, pkg_ref.repo,
pkg_ref.tree_id pkg_ref.tree_id
@ -491,7 +487,7 @@ impl PackageSource for GitPackageSource {
} }
if pkg_ref.use_new_structure() && name == "default.project.json" { if pkg_ref.use_new_structure() && name == "default.project.json" {
tracing::debug!( log::debug!(
"removing default.project.json from {}#{} at {path} - using new structure", "removing default.project.json from {}#{} at {path} - using new structure",
pkg_ref.repo, pkg_ref.repo,
pkg_ref.tree_id pkg_ref.tree_id

View file

@ -1,11 +1,8 @@
#![allow(async_fn_in_trait)] #![allow(async_fn_in_trait)]
use crate::{util::authenticate_conn, Project}; use crate::{util::authenticate_conn, Project};
use fs_err::tokio as fs; use fs_err::tokio as fs;
use gix::remote::Direction; use gix::remote::Direction;
use std::fmt::Debug;
use tokio::task::spawn_blocking; use tokio::task::spawn_blocking;
use tracing::instrument;
/// A trait for sources that are based on Git repositories /// A trait for sources that are based on Git repositories
pub trait GitBasedSource { pub trait GitBasedSource {
@ -93,11 +90,7 @@ pub trait GitBasedSource {
} }
/// Reads a file from a tree /// Reads a file from a tree
#[instrument(skip(tree), ret, level = "trace")] pub fn read_file<I: IntoIterator<Item = P> + Clone, P: ToString + PartialEq<gix::bstr::BStr>>(
pub fn read_file<
I: IntoIterator<Item = P> + Clone + Debug,
P: ToString + PartialEq<gix::bstr::BStr>,
>(
tree: &gix::Tree, tree: &gix::Tree,
file_path: I, file_path: I,
) -> Result<Option<String>, errors::ReadFile> { ) -> Result<Option<String>, errors::ReadFile> {
@ -127,7 +120,6 @@ pub fn read_file<
} }
/// Gets the root tree of a repository /// Gets the root tree of a repository
#[instrument(skip(repo), level = "trace")]
pub fn root_tree(repo: &gix::Repository) -> Result<gix::Tree, errors::TreeError> { pub fn root_tree(repo: &gix::Repository) -> Result<gix::Tree, errors::TreeError> {
// this is a bare repo, so this is the actual path // this is a bare repo, so this is the actual path
let path = repo.path().to_path_buf(); let path = repo.path().to_path_buf();

View file

@ -30,7 +30,6 @@ use crate::{
use fs_err::tokio as fs; use fs_err::tokio as fs;
use futures::StreamExt; use futures::StreamExt;
use tokio::task::spawn_blocking; use tokio::task::spawn_blocking;
use tracing::instrument;
/// The pesde package reference /// The pesde package reference
pub mod pkg_ref; pub mod pkg_ref;
@ -74,7 +73,6 @@ impl PesdePackageSource {
} }
/// Reads the config file /// Reads the config file
#[instrument(skip_all, ret(level = "trace"), level = "debug")]
pub async fn config(&self, project: &Project) -> Result<IndexConfig, errors::ConfigError> { pub async fn config(&self, project: &Project) -> Result<IndexConfig, errors::ConfigError> {
let repo_url = self.repo_url.clone(); let repo_url = self.repo_url.clone();
let path = self.path(project); let path = self.path(project);
@ -101,12 +99,10 @@ impl PackageSource for PesdePackageSource {
type ResolveError = errors::ResolveError; type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError; type DownloadError = errors::DownloadError;
#[instrument(skip_all, level = "debug")]
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> { async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project).await GitBasedSource::refresh(self, project).await
} }
#[instrument(skip_all, level = "debug")]
async fn resolve( async fn resolve(
&self, &self,
specifier: &Self::Specifier, specifier: &Self::Specifier,
@ -128,10 +124,10 @@ impl PackageSource for PesdePackageSource {
} }
}; };
let IndexFile { entries, .. } = toml::from_str(&string) let entries: IndexFile = toml::from_str(&string)
.map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?; .map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;
tracing::debug!("{} has {} possible entries", specifier.name, entries.len()); log::debug!("{} has {} possible entries", specifier.name, entries.len());
Ok(( Ok((
PackageNames::Pesde(specifier.name.clone()), PackageNames::Pesde(specifier.name.clone()),
@ -159,7 +155,6 @@ impl PackageSource for PesdePackageSource {
)) ))
} }
#[instrument(skip_all, level = "debug")]
async fn download( async fn download(
&self, &self,
pkg_ref: &Self::Ref, pkg_ref: &Self::Ref,
@ -176,7 +171,7 @@ impl PackageSource for PesdePackageSource {
match fs::read_to_string(&index_file).await { match fs::read_to_string(&index_file).await {
Ok(s) => { Ok(s) => {
tracing::debug!( log::debug!(
"using cached index file for package {}@{} {}", "using cached index file for package {}@{} {}",
pkg_ref.name, pkg_ref.name,
pkg_ref.version, pkg_ref.version,
@ -197,7 +192,7 @@ impl PackageSource for PesdePackageSource {
let mut request = reqwest.get(&url).header(ACCEPT, "application/octet-stream"); let mut request = reqwest.get(&url).header(ACCEPT, "application/octet-stream");
if let Some(token) = project.auth_config.tokens().get(&self.repo_url) { if let Some(token) = project.auth_config.tokens().get(&self.repo_url) {
tracing::debug!("using token for {}", self.repo_url); log::debug!("using token for {}", self.repo_url);
request = request.header(AUTHORIZATION, token); request = request.header(AUTHORIZATION, token);
} }
@ -279,35 +274,28 @@ impl Default for AllowedRegistries {
} }
} }
// strips .git suffix to allow for more flexible matching
fn simplify_url(mut url: Url) -> Url {
url.path = url.path.strip_suffix(b".git").unwrap_or(&url.path).into();
url
}
impl AllowedRegistries { impl AllowedRegistries {
fn _is_allowed(&self, url: &Url) -> bool {
match self {
Self::All(all) => *all,
Self::Specific(urls) => urls.contains(url),
}
}
/// Whether the given URL is allowed /// Whether the given URL is allowed
pub fn is_allowed(&self, url: Url) -> bool { pub fn is_allowed(&self, mut this: Url, mut external: Url) -> bool {
self._is_allowed(&simplify_url(url)) // strip .git suffix to allow for more flexible matching
} this.path = this.path.strip_suffix(b".git").unwrap_or(&this.path).into();
external.path = external
.path
.strip_suffix(b".git")
.unwrap_or(&external.path)
.into();
/// Whether the given URL is allowed, or is the same as the given URL this == external
pub fn is_allowed_or_same(&self, this: Url, external: Url) -> bool { || (match self {
let this = simplify_url(this); Self::All(all) => *all,
let external = simplify_url(external); Self::Specific(urls) => urls.contains(&this) || urls.contains(&external),
(this == external) || self._is_allowed(&external) || self._is_allowed(&this) })
} }
} }
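The left-hand side factors the `.git`-suffix stripping into `simplify_url`, so `https://host/owner/repo` and `https://host/owner/repo.git` compare equal before the allow-list is consulted. A string-level sketch of that normalisation; the real code operates on `gix::Url` paths:

```rust
fn simplify(url: &str) -> &str {
    url.strip_suffix(".git").unwrap_or(url)
}

fn same_registry(a: &str, b: &str) -> bool {
    simplify(a) == simplify(b)
}

fn main() {
    assert!(same_registry(
        "https://example.com/owner/repo",
        "https://example.com/owner/repo.git",
    ));
}
```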
/// The configuration for the pesde index /// The configuration for the pesde index
#[derive(Deserialize, Debug, Clone)] #[derive(Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct IndexConfig { pub struct IndexConfig {
/// The URL of the API /// The URL of the API
pub api: url::Url, pub api: url::Url,
@ -315,22 +303,22 @@ pub struct IndexConfig {
pub download: Option<String>, pub download: Option<String>,
/// Whether Git is allowed as a source for publishing packages /// Whether Git is allowed as a source for publishing packages
#[serde(default)] #[serde(default)]
pub git_allowed: AllowedRegistries, pub git_allowed: bool,
/// Whether other registries are allowed as a source for publishing packages /// Whether other registries are allowed as a source for publishing packages
#[serde(default)] #[serde(default)]
pub other_registries_allowed: AllowedRegistries, pub other_registries_allowed: AllowedRegistries,
/// Whether Wally is allowed as a source for publishing packages /// Whether Wally is allowed as a source for publishing packages
#[serde(default)] #[serde(default)]
pub wally_allowed: AllowedRegistries, pub wally_allowed: bool,
/// The OAuth client ID for GitHub /// The OAuth client ID for GitHub
#[serde(default)] #[serde(default)]
pub github_oauth_client_id: Option<String>, pub github_oauth_client_id: Option<String>,
/// The maximum size of an archive in bytes /// The maximum size of an archive in bytes
#[serde(default = "default_archive_size")] #[serde(default = "default_archive_size")]
pub max_archive_size: usize, pub max_archive_size: usize,
/// The packages to display in the CLI for default script implementations /// The package to use for default script implementations
#[serde(default)] #[serde(default)]
pub scripts_packages: Vec<PackageName>, pub scripts_package: Option<PackageName>,
} }
impl IndexConfig { impl IndexConfig {
@ -432,20 +420,8 @@ pub struct IndexFileEntry {
pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>, pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
} }
/// The package metadata in the index file
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
pub struct IndexMetadata {}
/// The index file for a package /// The index file for a package
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] pub type IndexFile = BTreeMap<VersionId, IndexFileEntry>;
pub struct IndexFile {
/// Any package-wide metadata
#[serde(default, skip_serializing_if = "crate::util::is_default")]
pub meta: IndexMetadata,
/// The entries in the index file
#[serde(flatten)]
pub entries: BTreeMap<VersionId, IndexFileEntry>,
}
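The left-hand IndexFile keeps package-wide metadata alongside the per-version entries by flattening the version map into the same TOML table, so an index file that is just a map of version to entry still deserialises. A reduced sketch of that shape, with the entry value type simplified to `toml::Value`:

```rust
use serde::Deserialize;
use std::collections::BTreeMap;

#[derive(Deserialize, Default)]
struct Meta {}

#[derive(Deserialize)]
struct IndexFileSketch {
    #[serde(default)]
    meta: Meta,
    #[serde(flatten)]
    entries: BTreeMap<String, toml::Value>,
}

fn main() {
    let toml_src = r#"
["0.1.0 luau"]
description = "example"
"#;
    let file: IndexFileSketch = toml::from_str(toml_src).unwrap();
    assert_eq!(file.entries.len(), 1);
    let _ = file.meta;
}
```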
/// Errors that can occur when interacting with the pesde package source /// Errors that can occur when interacting with the pesde package source
pub mod errors { pub mod errors {

View file

@ -11,7 +11,6 @@ use crate::{
Project, LINK_LIB_NO_FILE_FOUND, Project, LINK_LIB_NO_FILE_FOUND,
}; };
use fs_err::tokio as fs; use fs_err::tokio as fs;
use tracing::instrument;
#[derive(Deserialize)] #[derive(Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
@ -20,8 +19,7 @@ struct SourcemapNode {
file_paths: Vec<RelativePathBuf>, file_paths: Vec<RelativePathBuf>,
} }
#[instrument(skip(project, package_dir), level = "debug")] pub(crate) async fn find_lib_path(
async fn find_lib_path(
project: &Project, project: &Project,
package_dir: &Path, package_dir: &Path,
) -> Result<Option<RelativePathBuf>, errors::FindLibPathError> { ) -> Result<Option<RelativePathBuf>, errors::FindLibPathError> {
@ -31,7 +29,7 @@ async fn find_lib_path(
.scripts .scripts
.get(&ScriptName::SourcemapGenerator.to_string()) .get(&ScriptName::SourcemapGenerator.to_string())
else { else {
tracing::warn!("no sourcemap generator script found in manifest"); log::warn!("no sourcemap generator script found in manifest");
return Ok(None); return Ok(None);
}; };
@ -57,7 +55,6 @@ async fn find_lib_path(
pub(crate) const WALLY_MANIFEST_FILE_NAME: &str = "wally.toml"; pub(crate) const WALLY_MANIFEST_FILE_NAME: &str = "wally.toml";
#[instrument(skip(project, tempdir), level = "debug")]
pub(crate) async fn get_target( pub(crate) async fn get_target(
project: &Project, project: &Project,
tempdir: &TempDir, tempdir: &TempDir,

View file

@ -1,13 +1,13 @@
use std::collections::BTreeMap; use std::collections::BTreeMap;
use semver::{Version, VersionReq};
use serde::{Deserialize, Deserializer};
use crate::{ use crate::{
manifest::{errors, DependencyType}, manifest::{errors, DependencyType},
names::wally::WallyPackageName, names::wally::WallyPackageName,
source::{specifiers::DependencySpecifiers, wally::specifier::WallyDependencySpecifier}, source::{specifiers::DependencySpecifiers, wally::specifier::WallyDependencySpecifier},
}; };
use semver::{Version, VersionReq};
use serde::{Deserialize, Deserializer};
use tracing::instrument;
#[derive(Deserialize, Clone, Debug)] #[derive(Deserialize, Clone, Debug)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
@ -63,7 +63,6 @@ pub struct WallyManifest {
impl WallyManifest { impl WallyManifest {
/// Get all dependencies from the manifest /// Get all dependencies from the manifest
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub fn all_dependencies( pub fn all_dependencies(
&self, &self,
) -> Result< ) -> Result<

View file

@ -30,7 +30,6 @@ use std::{
use tempfile::tempdir; use tempfile::tempdir;
use tokio::{io::AsyncWriteExt, sync::Mutex, task::spawn_blocking}; use tokio::{io::AsyncWriteExt, sync::Mutex, task::spawn_blocking};
use tokio_util::compat::FuturesAsyncReadCompatExt; use tokio_util::compat::FuturesAsyncReadCompatExt;
use tracing::instrument;
pub(crate) mod compat_util; pub(crate) mod compat_util;
pub(crate) mod manifest; pub(crate) mod manifest;
@ -69,7 +68,6 @@ impl WallyPackageSource {
} }
/// Reads the config file /// Reads the config file
#[instrument(skip_all, ret(level = "trace"), level = "debug")]
pub async fn config(&self, project: &Project) -> Result<WallyIndexConfig, errors::ConfigError> { pub async fn config(&self, project: &Project) -> Result<WallyIndexConfig, errors::ConfigError> {
let repo_url = self.repo_url.clone(); let repo_url = self.repo_url.clone();
let path = self.path(project); let path = self.path(project);
@ -96,12 +94,10 @@ impl PackageSource for WallyPackageSource {
type ResolveError = errors::ResolveError; type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError; type DownloadError = errors::DownloadError;
#[instrument(skip_all, level = "debug")]
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> { async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project).await GitBasedSource::refresh(self, project).await
} }
#[instrument(skip_all, level = "debug")]
async fn resolve( async fn resolve(
&self, &self,
specifier: &Self::Specifier, specifier: &Self::Specifier,
@ -115,7 +111,7 @@ impl PackageSource for WallyPackageSource {
let string = match read_file(&tree, [scope, name]) { let string = match read_file(&tree, [scope, name]) {
Ok(Some(s)) => s, Ok(Some(s)) => s,
Ok(None) => { Ok(None) => {
tracing::debug!( log::debug!(
"{} not found in wally registry. searching in backup registries", "{} not found in wally registry. searching in backup registries",
specifier.name specifier.name
); );
@ -138,7 +134,7 @@ impl PackageSource for WallyPackageSource {
.await .await
{ {
Ok((name, results)) => { Ok((name, results)) => {
tracing::debug!("found {} in backup registry {registry}", name); log::debug!("found {} in backup registry {registry}", name);
return Ok((name, results)); return Ok((name, results));
} }
Err(errors::ResolveError::NotFound(_)) => { Err(errors::ResolveError::NotFound(_)) => {
@ -166,7 +162,7 @@ impl PackageSource for WallyPackageSource {
.collect::<Result<_, _>>() .collect::<Result<_, _>>()
.map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?; .map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;
tracing::debug!("{} has {} possible entries", specifier.name, entries.len()); log::debug!("{} has {} possible entries", specifier.name, entries.len());
Ok(( Ok((
PackageNames::Wally(specifier.name.clone()), PackageNames::Wally(specifier.name.clone()),
@ -196,7 +192,6 @@ impl PackageSource for WallyPackageSource {
)) ))
} }
#[instrument(skip_all, level = "debug")]
async fn download( async fn download(
&self, &self,
pkg_ref: &Self::Ref, pkg_ref: &Self::Ref,
@ -212,7 +207,7 @@ impl PackageSource for WallyPackageSource {
let tempdir = match fs::read_to_string(&index_file).await { let tempdir = match fs::read_to_string(&index_file).await {
Ok(s) => { Ok(s) => {
tracing::debug!( log::debug!(
"using cached index file for package {}@{}", "using cached index file for package {}@{}",
pkg_ref.name, pkg_ref.name,
pkg_ref.version pkg_ref.version
@ -245,7 +240,7 @@ impl PackageSource for WallyPackageSource {
); );
if let Some(token) = project.auth_config.tokens().get(&self.repo_url) { if let Some(token) = project.auth_config.tokens().get(&self.repo_url) {
tracing::debug!("using token for {}", self.repo_url); log::debug!("using token for {}", self.repo_url);
request = request.header(AUTHORIZATION, token); request = request.header(AUTHORIZATION, token);
} }
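
Aside (illustrative): the resolve path above checks the primary wally index first and only walks the configured backup registries when the package is not found, returning the first registry that knows the name. A stand-alone sketch of that control flow, with made-up registry URLs and a simplified error type rather than the real pesde API:

```rust
#[derive(Debug)]
enum ResolveError {
    NotFound(String),
    Other(String),
}

// Stand-in for reading `scope/name` out of a registry's index tree.
fn read_index(registry: &str, name: &str) -> Result<String, ResolveError> {
    if registry.is_empty() {
        return Err(ResolveError::Other("empty registry URL".to_string()));
    }
    if registry == "https://primary.example/index" && name == "scope/name" {
        Ok("entries...".to_string())
    } else {
        Err(ResolveError::NotFound(name.to_string()))
    }
}

fn resolve(name: &str, primary: &str, backups: &[&str]) -> Result<String, ResolveError> {
    match read_index(primary, name) {
        Ok(entries) => return Ok(entries),
        // Not in the primary index: fall through to the backup registries.
        Err(ResolveError::NotFound(_)) => {}
        Err(other) => return Err(other),
    }
    for registry in backups {
        match read_index(registry, name) {
            Ok(entries) => return Ok(entries),
            Err(ResolveError::NotFound(_)) => continue,
            Err(other) => return Err(other),
        }
    }
    Err(ResolveError::NotFound(name.to_string()))
}

fn main() {
    let found = resolve(
        "scope/name",
        "https://primary.example/index",
        &["https://backup.example/index"],
    );
    println!("{found:?}");
}
```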

View file

@ -13,7 +13,6 @@ use relative_path::RelativePathBuf;
use reqwest::Client; use reqwest::Client;
use std::collections::{BTreeMap, HashSet}; use std::collections::{BTreeMap, HashSet};
use tokio::pin; use tokio::pin;
use tracing::instrument;
/// The workspace package reference /// The workspace package reference
pub mod pkg_ref; pub mod pkg_ref;
@ -36,7 +35,6 @@ impl PackageSource for WorkspacePackageSource {
Ok(()) Ok(())
} }
#[instrument(skip_all, level = "debug")]
async fn resolve( async fn resolve(
&self, &self,
specifier: &Self::Specifier, specifier: &Self::Specifier,
@ -128,7 +126,6 @@ impl PackageSource for WorkspacePackageSource {
)) ))
} }
#[instrument(skip_all, level = "debug")]
async fn download( async fn download(
&self, &self,
pkg_ref: &Self::Ref, pkg_ref: &Self::Ref,

View file

@ -19,7 +19,7 @@ impl DependencySpecifier for WorkspaceDependencySpecifier {}
impl Display for WorkspaceDependencySpecifier { impl Display for WorkspaceDependencySpecifier {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}@workspace:{}", self.name, self.version) write!(f, "workspace:{}{}", self.version, self.name)
} }
} }
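
Aside (illustrative): the two sides format a workspace specifier differently — `<name>@workspace:<version>` versus `workspace:<version><name>`, the latter with no separator between version and name. A sketch with plain strings standing in for the real name and version-requirement types, and a made-up package name:

```rust
use std::fmt::{self, Display};

// Stand-ins only: the real type is WorkspaceDependencySpecifier, which holds a
// package name and a version requirement rather than plain strings.
struct Specifier {
    name: String,
    version: String,
}

impl Display for Specifier {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Left-hand side of the diff: "<name>@workspace:<version>"
        write!(f, "{}@workspace:{}", self.name, self.version)
    }
}

fn main() {
    let spec = Specifier {
        name: "acme/util".into(),
        version: "^1.0.0".into(),
    };
    // Prints "acme/util@workspace:^1.0.0"; the right-hand side's format string
    // would instead produce "workspace:^1.0.0acme/util".
    println!("{spec}");
}
```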

View file

@ -83,7 +83,3 @@ pub fn deserialize_git_like_url<'de, D: Deserializer<'de>>(
pub fn hash<S: AsRef<[u8]>>(struc: S) -> String { pub fn hash<S: AsRef<[u8]>>(struc: S) -> String {
format!("{:x}", Sha256::digest(struc.as_ref())) format!("{:x}", Sha256::digest(struc.as_ref()))
} }
pub fn is_default<T: Default + Eq>(t: &T) -> bool {
t == &T::default()
}

View file

@ -26,7 +26,6 @@ export type TargetInfo = {
kind: TargetKind kind: TargetKind
lib: boolean lib: boolean
bin: boolean bin: boolean
scripts?: string[]
} }
export type TargetKind = "roblox" | "roblox_server" | "lune" | "luau" export type TargetKind = "roblox" | "roblox_server" | "lune" | "luau"

View file

@ -2,7 +2,7 @@
import { page } from "$app/stores" import { page } from "$app/stores"
import GitHub from "$lib/components/GitHub.svelte" import GitHub from "$lib/components/GitHub.svelte"
import type { TargetInfo } from "$lib/registry-api" import type { TargetInfo } from "$lib/registry-api"
import { BinaryIcon, Globe, Icon, LibraryIcon, Mail, ScrollIcon } from "lucide-svelte" import { BinaryIcon, Globe, Icon, LibraryIcon, Mail } from "lucide-svelte"
import type { ComponentType } from "svelte" import type { ComponentType } from "svelte"
import TargetSelector from "../../TargetSelector.svelte" import TargetSelector from "../../TargetSelector.svelte"
import Command from "./Command.svelte" import Command from "./Command.svelte"
@ -36,13 +36,11 @@
const exportNames: Partial<Record<keyof TargetInfo, string>> = { const exportNames: Partial<Record<keyof TargetInfo, string>> = {
lib: "Library", lib: "Library",
bin: "Binary", bin: "Binary",
scripts: "Scripts",
} }
const exportIcons: Partial<Record<keyof TargetInfo, ComponentType<Icon>>> = { const exportIcons: Partial<Record<keyof TargetInfo, ComponentType<Icon>>> = {
lib: LibraryIcon, lib: LibraryIcon,
bin: BinaryIcon, bin: BinaryIcon,
scripts: ScrollIcon,
} }
const exportEntries = $derived( const exportEntries = $derived(
@ -94,30 +92,20 @@
<ul class="mb-6 space-y-0.5"> <ul class="mb-6 space-y-0.5">
{#each exportEntries as [exportKey, exportName]} {#each exportEntries as [exportKey, exportName]}
{@const Icon = exportIcons[exportKey as keyof TargetInfo]} {@const Icon = exportIcons[exportKey as keyof TargetInfo]}
<li> <li class="flex items-center">
<div class="flex items-center">
<Icon aria-hidden="true" class="text-primary mr-2 size-5" /> <Icon aria-hidden="true" class="text-primary mr-2 size-5" />
{exportName} {exportName}
</div>
{#if exportKey === "bin"}
<p class="text-body/80 mb-4 mt-3 text-sm">
This package provides a binary that can be executed after installation, or globally
via:
</p>
<Command command={xCommand} class="mb-6" />
{:else if exportKey === "scripts"}
<div class="text-body/80 mt-3 flex flex-wrap gap-2 text-sm">
{#each currentTarget?.scripts ?? [] as script}
<div class="bg-card text-heading w-max truncate rounded px-3 py-2" title={script}>
{script}
</div>
{/each}
</div>
{/if}
</li> </li>
{/each} {/each}
</ul> </ul>
{#if currentTarget?.bin}
<p class="text-body/80 -mt-3 mb-4 text-sm">
This package provides a binary that can be executed after installation, or globally via:
</p>
<Command command={xCommand} class="mb-6" />
{/if}
{#if data.pkg.authors && data.pkg.authors.length > 0} {#if data.pkg.authors && data.pkg.authors.length > 0}
<h2 class="text-heading mb-2 text-lg font-semibold">Authors</h2> <h2 class="text-heading mb-2 text-lg font-semibold">Authors</h2>
<ul> <ul>

View file

@ -2,9 +2,7 @@
const { data } = $props() const { data } = $props()
</script> </script>
<div <div class="prose min-w-0 py-8 prose-pre:w-full prose-pre:overflow-auto">
class="prose prose-pre:w-full prose-pre:overflow-auto prose-img:inline-block prose-img:m-0 prose-video:inline-block prose-video:m-0 min-w-0 py-8"
>
<!-- eslint-disable-next-line svelte/no-at-html-tags --> <!-- eslint-disable-next-line svelte/no-at-html-tags -->
{@html data.readmeHtml} {@html data.readmeHtml}
</div> </div>