Mirror of https://github.com/pesde-pkg/pesde.git, synced 2025-04-04 10:50:55 +01:00

Compare commits: v0.5.0-rc. ... 0.5 (94 commits)
Commits (SHA1):
32906400ec, 5c2f831c26, 97d9251f69, 89a2103164, 0c159e7689, 4f75af88b7, f009c957ca,
3569ff32cd, c3e764ddda, db3335bbf7, 711b0009cb, f88b800d51, 28df3bcca4, 0f74e2efa3,
a6c1108d5b, 9535175a45, d9d27cf45b, 60fb68fcf3, 78976834b2, 52603ea43e, 0dde647042,
3196a83b25, d387c27f16, a6846597ca, 3810a3b9ff, 52c502359b, 7d1e20da8c, d35f34e8f0,
9ee75ec9c9, 919b0036e5, 7466131f04, 0be7dd4d0e, f8d0bc6c4d, 381740d2ce, a7ea8eb9c1,
4a3619c26e, 16ab05ec72, 36e6f16ca6, 4843424dba, e51bc9f9bb, 6d8731f1e5, 49a42dc931,
13594d6103, eab46e4ee5, 7311427518, c94f0e55ec, 15af291f84, 2b2d280fe0, 0fa17a839f,
e30ec8a6cf, f6fce8be9e, 4d3ddd50cb, 5513ef41a3, ac74c57709, 5ba8c5dbb4, 7b592bb719,
f7d2d7cbb0, 91a3a9b122, b53457c42c, a4162cd300, e807c261a2, 11a356c99a, af30701a21,
81ecd02df2, 70f3bec275, 385e36f1e4, f69c05a05a, 564d9de675, e5e2bbeeb4, f0aafe212d,
9b31718a0e, 083bf3badd, 43d0949a45, b475ff40e5, cb17c419d0, 3aadebf3ea, 56579e38b2,
4eeced440d, 60dafa0114, a9243b0214, 97cc58afcf, b5b3257cac, 15d6655889, 80c47aa0e4,
2c003c62aa, b6a4d39c51, 37a7c34084, dad3fad402, 33917424a8, 9268159dc6, 3d662a6de3,
bb92a06d64, a067fbd4bd, e9bb21835c
93 changed files with 3985 additions and 2265 deletions
.github/workflows/debug.yml (vendored, new file, +79)

@@ -0,0 +1,79 @@
name: Debug

on:
  push:
  pull_request:

jobs:
  get-version:
    name: Get build version
    runs-on: ubuntu-latest
    outputs:
      version: v${{ steps.get_version.outputs.value }}+rev.g${{ steps.trim_sha.outputs.trimmed_sha }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Get package version
        uses: SebRollen/toml-action@v1.2.0
        id: get_version
        with:
          file: Cargo.toml
          field: package.version

      - name: Trim commit SHA
        id: trim_sha
        run: |
          commit_sha=${{ github.sha }}
          echo "trimmed_sha=${commit_sha:0:7}" | tee $GITHUB_OUTPUT

  build:
    strategy:
      matrix:
        include:
          - job-name: windows-x86_64
            target: x86_64-pc-windows-msvc
            runs-on: windows-latest
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-windows-x86_64

          - job-name: linux-x86_64
            target: x86_64-unknown-linux-gnu
            runs-on: ubuntu-latest
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-linux-x86_64

          - job-name: macos-x86_64
            target: x86_64-apple-darwin
            runs-on: macos-13
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-macos-x86_64

          - job-name: macos-aarch64
            target: aarch64-apple-darwin
            runs-on: macos-latest
            artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-macos-aarch64

    name: Build for ${{ matrix.job-name }}
    runs-on: ${{ matrix.runs-on }}
    needs: get-version
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Linux build dependencies
        if: ${{ matrix.runs-on == 'ubuntu-latest' }}
        run: |
          sudo apt-get update
          sudo apt-get install libdbus-1-dev pkg-config

      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable

      - name: Compile in debug mode
        run: cargo build --bins --no-default-features --features bin,patches,wally-compat --target ${{ matrix.target }} --locked

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.artifact-name }}
          if-no-files-found: error
          path: |
            target/${{ matrix.target }}/debug/pesde.exe
            target/${{ matrix.target }}/debug/pesde
.github/workflows/release.yaml (vendored, 50 lines changed)

@@ -4,8 +4,44 @@ on:
    tags:
      - v*
env:
  CRATE_NAME: pesde
  BIN_NAME: pesde
jobs:
  prepare:
    name: Prepare
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.extract_version.outputs.VERSION }}
      found: ${{ steps.ensure_not_published.outputs.FOUND }}
    steps:
      - uses: actions/checkout@v4
      - name: Extract version
        id: extract_version
        shell: bash
        run: |
          VERSION=$(echo ${{ github.ref_name }} | cut -d'+' -f1 | cut -c 2-)
          echo "VERSION=$VERSION" >> "$GITHUB_OUTPUT"

      - name: Ensure not published
        id: ensure_not_published
        shell: bash
        env:
          VERSION: ${{ steps.extract_version.outputs.VERSION }}
        run: |
          CRATE_NAME="${{ env.CRATE_NAME }}"
          if [ ${#CRATE_NAME} -eq 1 ]; then
            DIR="1"
          elif [ ${#CRATE_NAME} -eq 2 ]; then
            DIR="2"
          elif [ ${#CRATE_NAME} -eq 3 ]; then
            DIR="3/${CRATE_NAME:0:1}"
          else
            DIR="${CRATE_NAME:0:2}/${CRATE_NAME:2:2}"
          fi

          FOUND=$(curl -sSL --fail-with-body "https://index.crates.io/$DIR/${{ env.CRATE_NAME }}" | jq -s 'any(.[]; .vers == "${{ env.VERSION }}")')
          echo "FOUND=$FOUND" >> "$GITHUB_OUTPUT"

  build:
    strategy:
      matrix:

@@ -31,13 +67,17 @@ jobs:
          target: aarch64-apple-darwin
    runs-on: ${{ matrix.os }}
    name: Build for ${{ matrix.host }}-${{ matrix.arch }}
    needs: [ prepare ]
    if: ${{ needs.prepare.outputs.found == 'false' }}
    env:
      VERSION: ${{ needs.prepare.outputs.version }}
    steps:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@stable
      - name: Set env
        shell: bash
        run: |
          ARCHIVE_NAME=${{ env.BIN_NAME }}-$(echo ${{ github.ref_name }} | cut -c 2-)-${{ matrix.host }}-${{ matrix.arch }}
          ARCHIVE_NAME=${{ env.BIN_NAME }}-${{ env.VERSION }}-${{ matrix.host }}-${{ matrix.arch }}

          echo "ARCHIVE_NAME=$ARCHIVE_NAME" >> $GITHUB_ENV

@@ -91,7 +131,9 @@ jobs:
    permissions:
      contents: write
      pull-requests: read
    needs: [ build, publish ]
    needs: [ prepare, publish ]
    env:
      VERSION: ${{ needs.prepare.outputs.version }}
    steps:
      - uses: actions/checkout@v4
        with:

@@ -107,7 +149,7 @@ jobs:
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          tag_name: ${{ github.ref_name }}
          name: ${{ github.ref_name }}
          name: v${{ env.VERSION }}
          draft: true
          prerelease: ${{ startsWith(github.ref_name, 'v0') }}
          prerelease: ${{ startsWith(env.VERSION, '0') }}
          files: artifacts/*
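The `DIR` computation in the `prepare` job above follows the crates.io sparse-index layout, where a crate's metadata path is derived from the length of its name and each line of the resulting file is one JSON object per published version. As a worked example, `pesde` has five characters and falls into the last branch, so the check resolves to roughly this sketch (the `"true"` output assumes the version is already published):

```sh
# "pesde" is longer than 3 characters, so DIR="pe/sd" and the request becomes:
curl -sSL --fail-with-body "https://index.crates.io/pe/sd/pesde" \
  | jq -s 'any(.[]; .vers == "0.5.3")'
# prints "true" if 0.5.3 is already on crates.io, "false" otherwise
```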
CHANGELOG.md (185 lines changed)

@@ -5,101 +5,93 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.5.0-rc.11] - 2024-11-20
### Fixed
- Add back mistakenly removed updates check caching by @daimond113
- Set download error source to inner error to propagate the error by @daimond113
- Correctly copy workspace packages by @daimond113

## [0.5.0-rc.10] - 2024-11-16
### Fixed
- Fix `self-install` doing a cross-device move by @daimond113

### Changed
- Only store `pesde_version` executables in the version cache by @daimond113

## [0.5.0-rc.9] - 2024-11-16
### Fixed
- Correctly link Wally server packages by @daimond113

### Changed
- `self-upgrade` now will check for updates by itself by default by @daimond113

## [0.5.0-rc.8] - 2024-11-12
## [0.5.3] - 2024-12-30
### Added
- Add meta field in index files to preserve compatibility with potential future changes by @daimond113

### Changed
- Remove verbosity from release mode logging by @daimond113

## [0.5.2] - 2024-12-19
### Fixed
- Change dependency types for removed peer dependencies by @daimond113
- Resolve version to correct tag for `pesde_version` field by @daimond113
- Do not error on missing dependencies until full linking by @daimond113

### Changed
- Switch from `log` to `tracing` for logging by @daimond113

## [0.5.1] - 2024-12-15
### Fixed
- Ignore build metadata when comparing CLI versions by @daimond113

## [0.5.0] - 2024-12-14
### Added
- Add support for multiple targets under the same package name in workspace members by @daimond113
- Add `yes` argument to skip all prompts in publish command by @daimond113
- Publish all workspace members when publishing a workspace by @daimond113
- Inform user about not finding any bin package when using its bin invocation by @daimond113
- Support full version requirements in workspace version field by @daimond113
- Improved authentication system for registry changes by @daimond113
- New website by @lukadev-0
- Add `--index` flag to `publish` command to publish to a specific index by @daimond113
- Support fallback Wally registries by @daimond113
- Print that no updates are available in `outdated` command by @daimond113
- Support negated globs in `workspace_members` field by @daimond113
- Make `includes` use glob patterns by @daimond113
- Use symlinks for workspace dependencies to not require reinstalling by @daimond113
- Add `auth token` command to print the auth token for the index by @daimond113
- Support specifying which external registries are allowed on registries by @daimond113
- Add improved CLI styling by @daimond113
- Install pesde dependencies before Wally to support scripts packages by @daimond113
- Support packages exporting scripts by @daimond113
- Support using workspace root as a member by @daimond113
- Allow multiple, user selectable scripts packages to be selected (& custom packages inputted) in `init` command by @daimond113
- Support granular control over which repositories are allowed in various specifier types by @daimond113
- Display included scripts in `publish` command by @daimond113

### Fixed
- Fix versions with dots not being handled correctly by @daimond113
- Use workspace specifiers' `target` field when resolving by @daimond113
- Add feature gates to `wally-compat` specific code in init command by @daimond113
- Remove duplicated manifest file name in `publish` command by @daimond113
- Allow use of Luau packages in `execute` command by @daimond113
- Fix `self-upgrade` overwriting its own binary by @daimond113
- Correct `pesde.toml` inclusion message in `publish` command by @daimond113
- Allow writes to files when `link` is false in PackageFS::write_to by @daimond113
- Handle missing revisions in AnyPackageIdentifier::from_str by @daimond113
- Make GitHub OAuth client ID config optional by @daimond113
- Use updated aliases when reusing lockfile dependencies by @daimond113
- Listen for device flow completion without requiring pressing enter by @daimond113
- Sync scripts repo in background by @daimond113
- Don't make CAS files read-only on Windows (file removal is disallowed if the file is read-only) by @daimond113
- Validate package names are lowercase by @daimond113
- Use a different algorithm for finding a CAS directory to avoid issues with mounted drives by @daimond113
- Remove default.project.json from Git pesde dependencies by @daimond113
- Correctly (de)serialize workspace specifiers by @daimond113
- Fix CAS finder algorithm issues with Windows by @daimond113
- Fix CAS finder algorithm's AlreadyExists error by @daimond113
- Use moved path when setting file to read-only by @daimond113
- Correctly link Wally server packages by @daimond113
- Fix `self-install` doing a cross-device move by @daimond113
- Add back mistakenly removed updates check caching by @daimond113
- Set download error source to inner error to propagate the error by @daimond113
- Correctly copy workspace packages by @daimond113
- Fix peer dependencies being resolved incorrectly by @daimond113
- Set PESDE_ROOT to the correct path in `pesde run` by @daimond113
- Install dependencies of packages in `x` command by @daimond113
- Fix `includes` not supporting root files by @daimond113
- Link dependencies before type extraction to support more use cases by @daimond113
- Strip `.luau` extension from linker modules' require paths to comply with Luau by @daimond113
- Correctly handle graph paths for resolving overridden packages by @daimond113
- Do not require `--` in bin package executables on Unix by @daimond113
- Do not require lib or bin exports if package exports scripts by @daimond113
- Correctly resolve URLs in `publish` command by @daimond113
- Add Roblox types in linker modules even with no config generator script by @daimond113

### Changed
- Switched to fs-err for better errors with file system operations by @daimond113
- Use body bytes over multipart for publishing packages by @daimond113
### Removed
- Remove special scripts repo handling to favour standard packages by @daimond113

### Performance
- Switch to async Rust by @daimond113

## [0.5.0-rc.7] - 2024-10-30
### Added
- New website by @lukadev-0

### Fixed
- Use updated aliases when reusing lockfile dependencies by @daimond113
- Listen for device flow completion without requiring pressing enter by @daimond113
- Sync scripts repo in background by @daimond113
- Don't make CAS files read-only on Windows (file removal is disallowed if the file is read-only) by @daimond113
- Validate package names are lowercase by @daimond113

### Performance
- Clone dependency repos shallowly by @daimond113

### Changed
- Optimize boolean expression in `publish` command by @daimond113

## [0.5.0-rc.6] - 2024-10-14
### Added
- Support full version requirements in workspace version field by @daimond113
- Improved authentication system for registry changes by @daimond113

### Fixed
- Correct `pesde.toml` inclusion message in `publish` command by @daimond113
- Allow writes to files when `link` is false in PackageFS::write_to by @daimond113
- Handle missing revisions in AnyPackageIdentifier::from_str by @daimond113
- Make GitHub OAuth client ID config optional by @daimond113

## [0.5.0-rc.5] - 2024-10-12
### Added
- Inform user about not finding any bin package when using its bin invocation by @daimond113

### Fixed
- Fix `self-upgrade` overwriting its own binary by @daimond113
- Allow use of Luau packages in `execute` command by @daimond113
- Remove duplicated manifest file name in `publish` command by @daimond113

## [0.5.0-rc.4] - 2024-10-12
### Added
- Add `yes` argument to skip all prompts in publish command by @daimond113
- Publish all workspace members when publishing a workspace by @daimond113

### Fixed
- Add feature gates to `wally-compat` specific code in init command by @daimond113

## [0.5.0-rc.3] - 2024-10-06
### Fixed
- Use workspace specifiers' `target` field when resolving by @daimond113

## [0.5.0-rc.2] - 2024-10-06
### Added
- Add support for multiple targets under the same package name in workspace members by @daimond113
### Fixed
- Fix versions with dots not being handled correctly by @daimond113

## [0.5.0-rc.1] - 2024-10-06
### Changed
- Rewrite the entire project in a more maintainable way by @daimond113
- Support workspaces by @daimond113

@@ -107,15 +99,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)
- Support multiple targets for a single package by @daimond113
- Make registry much easier to self-host by @daimond113
- Start maintaining a changelog by @daimond113
- Optimize boolean expression in `publish` command by @daimond113
- Switched to fs-err for better errors with file system operations by @daimond113
- Use body bytes over multipart for publishing packages by @daimond113
- `self-upgrade` now will check for updates by itself by default by @daimond113
- Only store `pesde_version` executables in the version cache by @daimond113
- Remove lower bound limit of 3 characters for pesde package names by @daimond113

[0.5.0-rc.11]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.10..v0.5.0-rc.11
[0.5.0-rc.10]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.9..v0.5.0-rc.10
[0.5.0-rc.9]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.8..v0.5.0-rc.9
[0.5.0-rc.8]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.7..v0.5.0-rc.8
[0.5.0-rc.7]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.6..v0.5.0-rc.7
[0.5.0-rc.6]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.5..v0.5.0-rc.6
[0.5.0-rc.5]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.4..v0.5.0-rc.5
[0.5.0-rc.4]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.3..v0.5.0-rc.4
[0.5.0-rc.3]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.2..v0.5.0-rc.3
[0.5.0-rc.2]: https://github.com/daimond113/pesde/compare/v0.5.0-rc.1..v0.5.0-rc.2
[0.5.0-rc.1]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0-rc.1
### Performance
- Clone dependency repos shallowly by @daimond113
- Switch to async Rust by @daimond113
- Asyncify dependency linking by @daimond113
- Use `exec` in Unix bin linking to reduce the number of processes by @daimond113

[0.5.3]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
[0.5.2]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
[0.5.1]: https://github.com/daimond113/pesde/compare/v0.5.0%2Bregistry.0.1.0..v0.5.1%2Bregistry.0.1.0
[0.5.0]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0%2Bregistry.0.1.0
Cargo.lock (generated, 1059 lines changed): diff suppressed because it is too large.
Cargo.toml (74 lines changed)

@@ -1,38 +1,38 @@
[package]
name = "pesde"
version = "0.5.0-rc.11"
version = "0.5.3"
edition = "2021"
license = "MIT"
authors = ["daimond113 <contact@daimond113.com>"]
description = "A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune"
homepage = "https://pesde.daimond113.com"
repository = "https://github.com/daimond113/pesde"
repository = "https://github.com/pesde-pkg/pesde"
include = ["src/**/*", "Cargo.toml", "Cargo.lock", "README.md", "LICENSE", "CHANGELOG.md"]

[features]
bin = [
    "clap",
    "dirs",
    "pretty_env_logger",
    "dep:clap",
    "dep:dirs",
    "dep:tracing-subscriber",
    "reqwest/json",
    "indicatif",
    "indicatif-log-bridge",
    "inquire",
    "toml_edit",
    "colored",
    "anyhow",
    "keyring",
    "open",
    "dep:indicatif",
    "dep:tracing-indicatif",
    "dep:inquire",
    "dep:toml_edit",
    "dep:colored",
    "dep:anyhow",
    "dep:keyring",
    "dep:open",
    "gix/worktree-mutation",
    "serde_json",
    "winreg",
    "dep:serde_json",
    "dep:winreg",
    "fs-err/expose_original_error",
    "tokio/rt",
    "tokio/rt-multi-thread",
    "tokio/macros",
]
wally-compat = ["async_zip", "serde_json"]
patches = ["git2"]
wally-compat = ["dep:async_zip", "dep:serde_json"]
patches = ["dep:git2"]
version-management = ["bin"]

[[bin]]

@@ -44,46 +44,46 @@ required-features = ["bin"]
uninlined_format_args = "warn"

[dependencies]
serde = { version = "1.0.214", features = ["derive"] }
serde = { version = "1.0.216", features = ["derive"] }
toml = "0.8.19"
serde_with = "3.11.0"
gix = { version = "0.67.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
semver = { version = "1.0.23", features = ["serde"] }
gix = { version = "0.68.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
semver = { version = "1.0.24", features = ["serde"] }
reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls"] }
tokio-tar = "0.3.1"
async-compression = { version = "0.4.17", features = ["tokio", "gzip"] }
pathdiff = "0.2.2"
async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
pathdiff = "0.2.3"
relative-path = { version = "1.9.3", features = ["serde"] }
log = "0.4.22"
thiserror = "2.0.2"
tokio = "1.41.0"
tokio-util = "0.7.12"
tracing = { version = "0.1.41", features = ["attributes"] }
thiserror = "2.0.7"
tokio = { version = "1.42.0", features = ["process"] }
tokio-util = "0.7.13"
async-stream = "0.3.6"
futures = "0.3.31"
full_moon = { version = "1.1.0", features = ["luau"] }
url = { version = "2.5.3", features = ["serde"] }
chrono = { version = "0.4.38", features = ["serde"] }
full_moon = { version = "1.1.2", features = ["luau"] }
url = { version = "2.5.4", features = ["serde"] }
chrono = { version = "0.4.39", features = ["serde"] }
sha2 = "0.10.8"
tempfile = "3.14.0"
glob = "0.3.1"
wax = { version = "0.6.0", default-features = false }
fs-err = { version = "3.0.0", features = ["tokio"] }

# TODO: remove this when gitoxide adds support for: committing, pushing, adding
git2 = { version = "0.19.0", optional = true }

async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"], optional = true }
serde_json = { version = "1.0.132", optional = true }
serde_json = { version = "1.0.133", optional = true }

anyhow = { version = "1.0.93", optional = true }
open = { version = "5.3.0", optional = true }
anyhow = { version = "1.0.94", optional = true }
open = { version = "5.3.1", optional = true }
keyring = { version = "3.6.1", features = ["crypto-rust", "windows-native", "apple-native", "async-secret-service", "async-io"], optional = true }
colored = { version = "2.1.0", optional = true }
toml_edit = { version = "0.22.22", optional = true }
clap = { version = "4.5.20", features = ["derive"], optional = true }
clap = { version = "4.5.23", features = ["derive"], optional = true }
dirs = { version = "5.0.1", optional = true }
pretty_env_logger = { version = "0.5.0", optional = true }
indicatif = { version = "0.17.8", optional = true }
indicatif-log-bridge = { version = "0.2.3", optional = true }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true }
indicatif = { version = "0.17.9", optional = true }
tracing-indicatif = { version = "0.3.8", optional = true }
inquire = { version = "0.7.5", optional = true }

[target.'cfg(target_os = "windows")'.dependencies]
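The feature-list rewrite above moves to Cargo's `dep:` syntax: prefixing an optional dependency with `dep:` enables it without also exposing an implicit, user-visible feature named after it. A minimal sketch of the difference, using a hypothetical crate manifest:

```toml
[dependencies]
clap = { version = "4", optional = true }

[features]
# old style: "bin" enables clap, but Cargo also creates a public `clap` feature
# bin = ["clap"]
# dep: style: "bin" enables the optional dependency with no extra public feature
bin = ["dep:clap"]
```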
README.md

@@ -1,7 +1,7 @@
<br>

<div align="center">
    <img src="https://raw.githubusercontent.com/daimond113/pesde/0.5/assets/logotype.svg" alt="pesde logo" width="200" />
    <img src="https://raw.githubusercontent.com/pesde-pkg/pesde/0.5/assets/logotype.svg" alt="pesde logo" width="200" />
</div>

<br>

@@ -14,7 +14,7 @@ designed with multiple targets in mind, namely Roblox, Lune, and Luau.
## Installation

pesde can be installed from GitHub Releases. You can find the latest release
[here](https://github.com/daimond113/pesde/releases). Once you have downloaded
[here](https://github.com/pesde-pkg/pesde/releases). Once you have downloaded
the binary, run the following command to install it:

```sh
SECURITY.md (new file, +25)

@@ -0,0 +1,25 @@
# Security Policy

## Supported Versions

As pesde is currently in version 0.x, we can only guarantee security for:

- **The latest minor** (currently 0.5).
- **The latest release candidate for the next version**, if available.

When a new minor version is released, the previous version will immediately lose security support.

> **Note:** This policy will change with the release of version 1.0, which will include an extended support period for versions >=1.0.

| Version | Supported          |
| ------- | ------------------ |
| 0.5.x   | :white_check_mark: |
| < 0.5   | :x:                |

## Reporting a Vulnerability

We encourage all security concerns to be reported at [pesde@daimond113.com](mailto:pesde@daimond113.com), using the following format:

- **Subject**: The subject must be prefixed with `[SECURITY]` to ensure it is prioritized as a security concern.
- **Content**:
  - **Affected Versions**: Clearly specify which versions are affected by the issue.
  - **Issue Details**: Provide a detailed description of the issue, including reproduction steps and/or a simple example, if applicable.

We will try to respond as soon as possible.
docs/astro.config.mjs

@@ -2,15 +2,14 @@ import starlight from "@astrojs/starlight"
import tailwind from "@astrojs/tailwind"
import { defineConfig } from "astro/config"

import vercel from "@astrojs/vercel/serverless"

// https://astro.build/config
export default defineConfig({
	site: "https://docs.pesde.daimond113.com",
	integrations: [
		starlight({
			title: "pesde docs",
			social: {
				github: "https://github.com/daimond113/pesde",
				github: "https://github.com/pesde-pkg/pesde",
			},
			sidebar: [
				{

@@ -105,6 +104,4 @@ export default defineConfig({
	vite: {
		envDir: "..",
	},
	output: "hybrid",
	adapter: vercel(),
})
docs/bun.lockb (binary file not shown)
docs/package.json

@@ -14,7 +14,6 @@
		"@astrojs/starlight": "^0.28.2",
		"@astrojs/starlight-tailwind": "^2.0.3",
		"@astrojs/tailwind": "^5.1.1",
		"@astrojs/vercel": "^7.8.1",
		"@fontsource-variable/nunito-sans": "^5.1.0",
		"@shikijs/rehype": "^1.21.0",
		"astro": "^4.15.9",

@@ -27,4 +26,4 @@
		"prettier-plugin-astro": "^0.14.1",
		"prettier-plugin-tailwindcss": "^0.6.8"
	}
}
}
@@ -3,7 +3,12 @@
	href="https://pesde.daimond113.com/"
	class="flex text-[var(--sl-color-text-accent)] hover:opacity-80"
>
	<svg viewBox="0 0 56 28" class="h-7" fill="none" xmlns="http://www.w3.org/2000/svg">
	<svg
		viewBox="0 0 56 28"
		class="h-7"
		fill="none"
		xmlns="http://www.w3.org/2000/svg"
	>
		<title>pesde</title>
		<path
			d="M0 28V26.3156H2.25652V12.2361H0.0635639V10.5517H4.44947L4.48125 11.9819L3.78205 12.3315C4.41769 11.6746 5.16986 11.1661 6.03857 10.8059C6.92846 10.4245 7.82895 10.2338 8.74003 10.2338C9.863 10.2338 10.88 10.4775 11.7911 10.9648C12.7234 11.4522 13.4544 12.1726 13.9841 13.126C14.5349 14.0795 14.8104 15.2448 14.8104 16.6221C14.8104 18.0416 14.5138 19.26 13.9205 20.277C13.3272 21.2728 12.5327 22.0356 11.5368 22.5653C10.5622 23.095 9.5028 23.3598 8.35865 23.3598C7.72301 23.3598 7.11916 23.2751 6.54708 23.1056C5.99619 22.9361 5.50887 22.7242 5.08511 22.4699C4.66135 22.1945 4.34353 21.8873 4.13165 21.5483L4.60838 21.4529L4.5766 26.3156H7.02381V28H0ZM7.94549 21.6118C9.19558 21.6118 10.2444 21.2092 11.0919 20.4041C11.9394 19.5778 12.3632 18.3807 12.3632 16.8127C12.3632 15.2872 11.9606 14.1113 11.1555 13.2849C10.3503 12.4586 9.3333 12.0454 8.1044 12.0454C7.72301 12.0454 7.26747 12.1196 6.73777 12.2679C6.20807 12.395 5.67837 12.6069 5.14867 12.9035C4.61898 13.2002 4.17403 13.5922 3.81383 14.0795L4.5766 12.7446L4.60838 20.7219L3.8774 19.7367C4.42828 20.3299 5.06392 20.7961 5.78431 21.1351C6.5047 21.4529 7.2251 21.6118 7.94549 21.6118Z"

@@ -22,7 +27,8 @@
			fill="currentColor"></path>
	</svg>
</a>
<span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span>
<span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span
>
<a
	class="font-medium text-[var(--sl-color-gray-2)] no-underline hover:opacity-80 md:text-lg"
	href="/">docs</a
@@ -8,9 +8,9 @@ A **binary package** is a package that contains a binary export.
Binary packages can be run like a normal program. There are several ways to use
binary packages with pesde.

To make your own binary package, see the [guide](/quickstart#adding-a-main-script).
## Using a binary package

## Using `pesde x`
### With `pesde x`

The `pesde x` command can be used to run a one-off binary package. This is
useful for running a binary package without installing it or outside of a pesde

@@ -21,7 +21,7 @@ pesde x pesde/hello
# Hello, pesde! (pesde/hello@1.0.0, lune)
```

## Installing a binary package
### By installing

Binary packages can be installed using the `pesde add` and `pesde install`
commands.

@@ -35,9 +35,40 @@ pesde install
```

This will add the binary package to your `PATH`, meaning that it can be run
anywhere!
anywhere in a project which has it installed under that alias!

```sh
hello
# Hello, pesde! (pesde/hello@1.0.0, lune)
```

## Making a binary package

To make a binary package you must use a target compatible with binary exports.
These currently are `lune` and `luau`.

Here is an example of a binary package:

```toml title="pesde.toml"
name = "pesde/hello"
version = "1.0.0"
license = "MIT"

[target]
environment = "lune"
bin = "main.luau"
```

The `bin` field specifies the entry point for the binary package. This file
will be run when the binary package is executed.

```luau title="main.luau"
print("Hello, pesde!")
```

Binary packages get access to custom variables provided by pesde. You can find
them in the `_G` table. These are:

- `PESDE_ROOT`: The root (where the pesde.toml is located) of where the package is
  installed. This will be in a temporary directory if the package is run with
  `pesde x`.
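As a minimal sketch of reading that variable from the entry point above (assuming the `lune` environment from the example manifest):

```luau
-- main.luau: _G.PESDE_ROOT points at the installed package's root
print("Hello, pesde!")
print(_G.PESDE_ROOT) -- a temporary directory when run via `pesde x`
```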
@@ -17,7 +17,7 @@ dependencies on packages published to a [pesde registry](https://pesde.daimond11

```toml title="pesde.toml"
[indices]
default = "https://github.com/daimond113/pesde-index"
default = "https://github.com/pesde-pkg/index"

[dependencies]
hello = { name = "pesde/hello", version = "^1.0.0" }

@@ -38,17 +38,17 @@ Git dependencies are dependencies on packages hosted on a Git repository.

```toml title="pesde.toml"
[dependencies]
acme = { repo = "acme/package", rev = "main" }
acme = { repo = "acme/package", rev = "aeff6" }
```

In this example, we're specifying a dependency on the package contained within
the `acme/package` GitHub repository at the `main` branch.
the `acme/package` GitHub repository at the `aeff6` commit.

You can also use a URL to specify the Git repository and a specific commit.
You can also use a URL to specify the Git repository and a tag for the revision.

```toml title="pesde.toml"
[dependencies]
acme = { repo = "https://git.acme.local/package.git", rev = "aeff6" }
acme = { repo = "https://git.acme.local/package.git", rev = "v0.1.0" }
```

You can also specify a path if the package is not at the root of the repository.
@@ -12,15 +12,14 @@ Before you can publish a package, you must configure the required fields in your

### `includes`

The `includes` field is a list of files and directories that should be included
in the package.
The `includes` field is a list of globs that should be included in the package.

```toml
includes = [
    "pesde.toml",
    "README.md",
    "LICENSE",
    "init.luau",
    "src/**/*.luau",
]
```
@@ -9,33 +9,30 @@ pesde can be used in Roblox projects, however this requires some extra setup.
Namely, you need to specify a `roblox_sync_config_generator` script in order
to generate the adequate configuration for the sync tool you are using.

The [`pesde-scripts`](https://github.com/daimond113/pesde-scripts)
The [`pesde-scripts`](https://github.com/pesde-pkg/scripts)
repository contains a list of scripts for different sync tools. If the tool
you are using is not supported, you can write your own script and submit a PR
to get it added.

These scripts are automatically cloned into the `~/.pesde/scripts` folder and
kept up to date when you use pesde.

## Usage with Rojo

[Rojo](https://rojo.space/) is a popular tool for syncing files into Roblox
Studio.

Running `pesde init` will prompt you to select a target, select
`roblox` or `roblox_server` in this case. This will set up the configuration
needed to use pesde in a project using Rojo.
`roblox` or `roblox_server` in this case. You will be prompted to pick out a
scripts package. Select `pesde/scripts_rojo` to get started with Rojo.

## Usage with other tools

If you are using a different sync tool, you should look for its scripts in the
pesde-scripts repository. If you cannot find them, you can write your own and
optionally submit a PR to help others using the same tool as you get started
quicker.
If you are using a different sync tool, you should look for its scripts
package on the registry. If you cannot find it, you can write your own and
optionally submit a PR to pesde-scripts to help others using the same tool as
you get started quicker.

Scaffold your project with `pesde init`, select the `roblox` or `roblox_server`
target, and then replace the `.pesde/roblox_sync_config_generator.luau` script
with the one you want to use.
target, and then create a `.pesde/roblox_sync_config_generator.luau` script
and put its path in the manifest.

## Authoring packages

@@ -99,7 +96,7 @@ includes = [
    "pesde.toml",
    "LICENSE",
    "README.md",
    "src",
    "src/**/*.luau",
]

[target]
docs/src/content/docs/guides/scripts-packages.mdx (new file, +53)

@@ -0,0 +1,53 @@
---
title: Using Scripts Packages
description: Learn how to use scripts packages.
---

A **scripts package** is a package that contains scripts. The scripts provided
by the package are linked in `.pesde/{alias}/{script_name}.luau` of the project
that uses the package.

## Using a scripts package

Scripts packages can be installed using the `pesde add` and `pesde install`
commands.

This requires a `pesde.toml` file to be present in the current directory, and
will add the scripts package to the `dependencies` section of the file.

```sh
pesde add pesde/scripts_rojo
pesde install
```

This will add the scripts package to your project, and installing will put the
scripts at `.pesde/scripts_rojo/{script_name}.luau`. You can then add the scripts
to your manifest, for example:

```toml title="pesde.toml"
[scripts]
roblox_sync_config_generator = ".pesde/scripts_rojo/roblox_sync_config_generator.luau"
```

## Making a scripts package

To make a scripts package you must use a target compatible with scripts exports.
These currently are `lune` and `luau`.

Here is an example of a scripts package:

```toml title="pesde.toml"
name = "pesde/scripts_rojo"
version = "1.0.0"
license = "MIT"

[target]
environment = "lune"

[target.scripts]
roblox_sync_config_generator = "roblox_sync_config_generator.luau"
```

The `scripts` table in the target is a map of script names to the path of the
script in the package. The scripts will be linked in the project that uses the
package at `.pesde/{alias}/{script_name}.luau`.
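For a rough sense of what such a script can look like, here is a hypothetical, heavily simplified `roblox_sync_config_generator` for Rojo using Lune's standard libraries. The argument layout (package directory first, then its `build_files`) is an assumption made for illustration, not pesde's documented contract; see the real generators in the pesde-pkg/scripts repository linked elsewhere in these docs:

```luau
local fs = require("@lune/fs")
local process = require("@lune/process")
local serde = require("@lune/serde")

-- assumed inputs: args[1] = package directory, args[2..] = its build files
local packageDir = process.args[1]
local tree = {}
for i = 2, #process.args do
	tree[process.args[i]] = { ["$path"] = process.args[i] }
end

-- write a minimal Rojo project file next to the package's pesde.toml
fs.writeFile(
	packageDir .. "/default.project.json",
	serde.encode("json", { name = "package", tree = tree })
)
```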
@@ -19,10 +19,10 @@ To create an index, create a new repository and add a `config.toml` file with
the following content:

```toml title="config.toml"
# The URL of the registry API
# the URL of the registry API
api = "https://registry.acme.local/"

# Package download URL (optional)
# package download URL (optional)
download = "{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}"

# the client ID of the GitHub OAuth app (optional)

@@ -33,13 +33,16 @@ git_allowed = true

# whether to allow packages which depend on packages from other registries
# (default: false)
other_registries_allowed = true
other_registries_allowed = ["https://git.acme.local/index"]

# whether to allow packages with Wally dependencies (default: false)
wally_allowed = false

# the maximum size of the archive in bytes (default: 4MB)
max_archive_size = 4194304

# the scripts packages present in the `init` command selection by default
scripts_packages = ["pesde/scripts_rojo"]
```

- **api**: The URL of the registry API. See below for more information.

@@ -60,18 +63,24 @@ max_archive_size = 4194304
- **github_oauth_client_id**: This is required if you use GitHub OAuth for
  authentication. See below for more information.

- **git_allowed**: Whether to allow packages with Git dependencies. This is
  optional and defaults to `false`.
- **git_allowed**: Whether to allow packages with Git dependencies. This can be
  either a bool or a list of allowed repository URLs. This is optional and
  defaults to `false`.

- **other_registries_allowed**: Whether to allow packages which depend on
  packages from other registries. This is optional and defaults to `false`.
  packages from other registries. This can be either a bool or a list of
  allowed index repository URLs. This is optional and defaults to `false`.

- **wally_allowed**: Whether to allow packages with Wally dependencies. This is
- **wally_allowed**: Whether to allow packages with Wally dependencies. This can
  be either a bool or a list of allowed index repository URLs. This is
  optional and defaults to `false`.

- **max_archive_size**: The maximum size of the archive in bytes. This is
  optional and defaults to `4194304` (4MB).

- **scripts_packages**: The scripts packages present in the `init` command
  selection by default. This is optional and defaults to none.

You should then push this repository to [GitHub](https://github.com/).

## Configuring the registry

@@ -80,7 +89,7 @@ The registry is a web server that provides package downloads and the ability to
publish packages.

The official registry implementation is available in the
[pesde GitHub repository](https://github.com/daimond113/pesde/tree/0.5/registry).
[pesde GitHub repository](https://github.com/pesde-pkg/pesde/tree/0.5/registry).

Configuring the registry is done using environment variables. In order to allow
the registry to access the index repository, you must use an account that

@@ -97,7 +106,7 @@ for this purpose.
### General configuration

- **INDEX_REPO_URL**: The URL of the index repository. This is required.\
  Example: `https://github.com/daimond113/pesde-index.git`
  Example: `https://github.com/pesde-pkg/index.git`

- **GIT_USERNAME**: The username of a Git account that has push access to the
  index repository. This is required.

@@ -193,7 +202,7 @@ The registry supports [Sentry](https://sentry.io/) for error tracking.
First clone the repository and navigate to the repository directory:

```sh
git clone https://github.com/daimond113/pesde.git
git clone https://github.com/pesde-pkg/pesde.git
cd pesde
```
@@ -17,7 +17,7 @@ You can follow the installation instructions in the

<Steps>

1. Go to the [GitHub releases page](https://github.com/daimond113/pesde/releases/latest).
1. Go to the [GitHub releases page](https://github.com/pesde-pkg/pesde/releases/latest).

2. Download the corresponding archive for your operating system. You can choose
   whether to use the `.zip` or `.tar.gz` files.

@@ -41,6 +41,16 @@ You can follow the installation instructions in the
pesde should now be installed on your system. You may need to restart your
computer for the changes to take effect.

<Aside type="caution">
pesde uses symlinks, which are an administrator-level operation on Windows.
To ensure proper functionality, enable [Developer Mode](https://learn.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development).

If you are getting errors such as `Failed to symlink file, a required
privilege is not held by the client`, then enabling this setting will fix
them.
</Aside>

</TabItem>
<TabItem label="Linux & macOS">

@@ -59,7 +69,7 @@ You can follow the installation instructions in the
environment variable.

```sh title=".zshrc"
export PATH = "$PATH:/home/user/.pesde/bin"
export PATH="$PATH:$HOME/.pesde/bin"
```

You should then be able to run `pesde` after restarting your shell.
@@ -27,13 +27,13 @@ environment we're targeting is `luau`.
```sh
pesde init

# What is the name of the project? <username>/hello_pesde
# What is the description of the project? (leave empty for none)
# Who are the authors of this project? (leave empty for none, comma separated)
# What is the repository URL of this project? (leave empty for none)
# What is the license of this project? (leave empty for none) MIT
# What environment are you targeting for your package? luau
# Would you like to setup a default roblox_sync_config_generator script? No
# what is the name of the project? <username>/hello_pesde
# what is the description of the project?
# who are the authors of this project?
# what is the repository URL of this project?
# what is the license of this project? MIT
# what environment are you targeting for your package? luau
# would you like to setup default Roblox compatibility scripts? No
```

The command will create a `pesde.toml` file in the current folder. Go ahead

@@ -54,7 +54,7 @@ the path to the main script of our package.
+ bin = "main.luau"

[indices]
default = "https://github.com/daimond113/pesde-index"
default = "https://github.com/pesde-pkg/index"
```

Don't forget to save the file after making the changes.

@@ -98,7 +98,7 @@ You should see that `pesde.toml` has been updated with the new dependency.
bin = "main.luau"

[indices]
default = "https://github.com/daimond113/pesde-index"
default = "https://github.com/pesde-pkg/index"

+ [dependencies]
+ hello = { name = "pesde/hello", version = "^1.0.0" }

@@ -111,7 +111,7 @@ pesde install
```

You should see that pesde has created a `luau_packages` folder containing the
newly installed package. It has alsoo created a `pesde.lock` file, this file
newly installed package. It has also created a `pesde.lock` file, this file
contains the exact versions of the dependencies that were installed so that
they can be installed again in the future.
@@ -34,6 +34,10 @@ Removes the stored token for the index.
Prints the username of the currently authenticated user of the index. Only
works if the token is a GitHub token.

### `pesde auth token`

Prints the token for the index.

## `pesde config`

Configuration-related commands.

@@ -49,20 +53,7 @@ is printed.

- `-r, --reset`: Resets the default index.

The default index is [`pesde-index`](https://github.com/daimond113/pesde-index).

### `pesde config scripts-repo`

```sh
pesde config scripts-repo [REPO]
```

Configures the scripts repository. If no repository is provided, the current
scripts repository is printed.

- `-r, --reset`: Resets the scripts repository.

The default scripts repository is [`pesde-scripts`](https://github.com/daimond113/pesde-scripts).
The default index is [`pesde-index`](https://github.com/pesde-pkg/index).

## `pesde init`
@@ -28,7 +28,7 @@ may only contain lowercase letters, numbers, and underscores.

The first one to publish to a given scope gets to own it. If you want multiple
people to be able to publish to the same scope, you can send a pull request to
the [pesde-index GitHub repository](https://github.com/daimond113/pesde-index)
the [pesde-index GitHub repository](https://github.com/pesde-pkg/index)
and add the GitHub user ID of the other person to the `owners` field of the
`scope.toml` file of the given scope. For more information, see
[policies](/registry/policies#package-ownership).

@@ -71,8 +71,8 @@ package cannot be published to the registry.

### `includes`

List of top-level files and directories to include in the package when
publishing. Files not listed here will not be published.
List of globs to include in the package when publishing. Files and directories
not listed here will not be published.

```toml
includes = [

@@ -80,7 +80,7 @@ includes = [
    "README.md",
    "LICENSE",
    "init.luau",
    "docs",
    "docs/**/*.md",
]
```

@@ -155,6 +155,19 @@ build_files = [
These files are passed to [`roblox_sync_config_generator`](#roblox_sync_config_generator)
when the package is installed in order to generate the necessary configuration.

### `scripts`

**Allowed in:** `luau`, `lune`

A list of scripts that will be linked to the dependant's `.pesde` directory, and
copied over to the [scripts](#scripts-1) section when initialising a project with
this package as the scripts package.

```toml
[target.scripts]
roblox_sync_config_generator = "scripts/roblox_sync_config_generator.luau"
```

## `[scripts]`

The `[scripts]` section contains scripts that can be run using the `pesde run`

@@ -177,10 +190,6 @@ sync tools.
  of files specified within the [`target.build_files`](#build_files) of the
  package.

You can find template scripts inside the
[`pesde-scripts` repository](https://github.com/daimond113/pesde-scripts)
for various sync tools.

<LinkCard
  title="Roblox"
  description="Learn more about using pesde in Roblox projects."

@@ -190,7 +199,7 @@ for various sync tools.
<LinkCard
  title="Example script for Rojo"
  description="An example script for generating configuration for Rojo."
  href="https://github.com/daimond113/pesde-scripts/blob/master/lune/rojo/roblox_sync_config_generator.luau"
  href="https://github.com/pesde-pkg/scripts/blob/master/src/generators/rojo/sync_config.luau"
/>

### `sourcemap_generator`

@@ -205,7 +214,7 @@ through `process.args`.
<LinkCard
  title="Example script for Rojo"
  description="An example script for generating configuration for Rojo."
  href="https://github.com/daimond113/pesde-scripts/blob/master/lune/rojo/sourcemap_generator.luau"
  href="https://github.com/pesde-pkg/scripts/blob/master/src/generators/rojo/sourcemap.luau"
/>

## `[indices]`

@@ -215,7 +224,7 @@ installed from.

```toml
[indices]
default = "https://github.com/daimond113/pesde-index"
default = "https://github.com/pesde-pkg/index"
acme = "https://github.com/acme/pesde-index"
```

@@ -360,14 +369,14 @@ foo = { wally = "acme/foo", version = "1.2.3", index = "acme" }

```toml
[dependencies]
foo = { repo = "acme/packages", rev = "main", path = "foo" }
foo = { repo = "acme/packages", rev = "aeff6", path = "foo" }
```

**Git dependencies** contain the following fields:

- `repo`: The URL of the Git repository.
  This can either be `<owner>/<name>` for a GitHub repository, or a full URL.
- `rev`: The Git revision to install. This can be a branch, tag, or commit hash.
- `rev`: The Git revision to install. This can be a tag or commit hash.
- `path`: The path within the repository to install. If not specified, the root
  of the repository is used.
fly.toml (2 lines changed)

@@ -10,7 +10,7 @@ ADDRESS = '0.0.0.0'
PORT = '8080'
COMMITTER_GIT_NAME = 'pesde index updater'
COMMITTER_GIT_EMAIL = 'pesde@daimond113.com'
INDEX_REPO_URL = 'https://github.com/daimond113/pesde-index'
INDEX_REPO_URL = 'https://github.com/pesde-pkg/index'

[http_service]
internal_port = 8080
registry/CHANGELOG.md (new file, +22)

@@ -0,0 +1,22 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.1.2]
### Changed
- Update to pesde lib API changes by @daimond113

## [0.1.1] - 2024-12-19
### Changed
- Switch to tracing for logging by @daimond113

## [0.1.0] - 2024-12-14
### Added
- Rewrite registry for pesde v0.5.0 by @daimond113

[0.1.2]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
[0.1.1]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
[0.1.0]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0%2Bregistry.0.1.0
registry/Cargo.toml

@@ -1,34 +1,33 @@
[package]
name = "pesde-registry"
version = "0.7.0"
version = "0.1.2"
edition = "2021"
repository = "https://github.com/daimond113/pesde-index"
repository = "https://github.com/pesde-pkg/index"
publish = false

[dependencies]
actix-web = "4.9.0"
actix-cors = "0.7.0"
actix-governor = "0.7.0"
actix-governor = "0.8.0"
dotenvy = "0.15.7"
thiserror = "2.0.2"
thiserror = "2.0.7"
tantivy = "0.22.0"
semver = "1.0.23"
chrono = { version = "0.4.38", features = ["serde"] }
url = "2.5.2"
semver = "1.0.24"
chrono = { version = "0.4.39", features = ["serde"] }
futures = "0.3.31"
tokio = "1.41.0"
tokio = "1.42.0"
tempfile = "3.14.0"
fs-err = { version = "3.0.0", features = ["tokio"] }
async-stream = "0.3.6"

git2 = "0.19.0"
gix = { version = "0.67.0", default-features = false, features = [
gix = { version = "0.68.0", default-features = false, features = [
    "blocking-http-transport-reqwest-rust-tls",
    "credentials",
] }

serde = "1.0.214"
serde_json = "1.0.132"
serde = "1.0.216"
serde_json = "1.0.133"
serde_yaml = "0.9.34"
toml = "0.8.19"
convert_case = "0.6.0"

@@ -39,16 +38,13 @@ reqwest = { version = "0.12.9", features = ["json", "rustls-tls"] }
constant_time_eq = "0.3.1"

tokio-tar = "0.3.1"
async-compression = { version = "0.4.17", features = ["tokio", "gzip"] }
async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }

log = "0.4.22"
pretty_env_logger = "0.5.0"
tracing = { version = "0.1.41", features = ["attributes"] }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
tracing-actix-web = "0.7.15"

sentry = "0.34.0"
sentry-log = "0.34.0"
sentry-actix = "0.34.0"
sentry = { version = "0.35.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "tracing"] }
sentry-actix = "0.35.0"

pesde = { path = "..", features = [
    "wally-compat",
    "git2",
] }
pesde = { path = "..", features = ["wally-compat"] }
@@ -41,11 +41,11 @@ impl AuthImpl for GitHubAuth {
    {
        Ok(response) => match response.error_for_status_ref() {
            Ok(_) => response,
            Err(e) if e.status().is_some_and(|s| s == StatusCode::UNAUTHORIZED) => {
            Err(e) if e.status().is_some_and(|s| s == StatusCode::NOT_FOUND) => {
                return Ok(None);
            }
            Err(_) => {
                log::error!(
                tracing::error!(
                    "failed to get user: {}",
                    response.into_error().await.unwrap_err()
                );

@@ -53,7 +53,7 @@ impl AuthImpl for GitHubAuth {
            }
        },
        Err(e) => {
            log::error!("failed to get user: {e}");
            tracing::error!("failed to get user: {e}");
            return Ok(None);
        }
    };

@@ -61,7 +61,7 @@ impl AuthImpl for GitHubAuth {
    let user_id = match response.json::<UserResponse>().await {
        Ok(resp) => resp.user.id,
        Err(e) => {
            log::error!("failed to get user: {e}");
            tracing::error!("failed to get user: {e}");
            return Ok(None);
        }
    };
@@ -184,14 +184,10 @@ pub fn get_auth_from_env(config: &IndexConfig) -> Auth {
 }

 pub fn get_token_from_req(req: &ServiceRequest) -> Option<String> {
-    let token = match req
+    let token = req
         .headers()
         .get(AUTHORIZATION)
-        .and_then(|token| token.to_str().ok())
-    {
-        Some(token) => token,
-        None => return None,
-    };
+        .and_then(|token| token.to_str().ok())?;

     let token = if token.to_lowercase().starts_with("bearer ") {
         token[7..].to_string()
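Note: the refactor above replaces a manual match on an Option with the `?` operator. A minimal standalone sketch of the same pattern follows; the plain HashMap standing in for the actix-web header map is an assumption for illustration only.

// Sketch: `?` on Option replaces the old `match ... None => return None` boilerplate.
fn get_token(headers: &std::collections::HashMap<String, String>) -> Option<String> {
    // Returns None early when the header is absent.
    let token = headers.get("authorization")?;
    // Same "bearer " stripping as the code above.
    let token = if token.to_lowercase().starts_with("bearer ") {
        token[7..].to_string()
    } else {
        token.to_string()
    };
    Some(token)
}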
@@ -71,7 +71,7 @@ pub async fn get_package_version(

     let (scope, name_part) = name.as_str();

-    let entries: IndexFile = {
+    let file: IndexFile = {
         let source = app_state.source.lock().await;
         let repo = gix::open(source.path(&app_state.project))?;
         let tree = root_tree(&repo)?;

@@ -84,14 +84,15 @@ pub async fn get_package_version(

     let Some((v_id, entry, targets)) = ({
         let version = match version {
-            VersionRequest::Latest => match entries.keys().map(|k| k.version()).max() {
+            VersionRequest::Latest => match file.entries.keys().map(|k| k.version()).max() {
                 Some(latest) => latest.clone(),
                 None => return Ok(HttpResponse::NotFound().finish()),
             },
             VersionRequest::Specific(version) => version,
         };

-        let versions = entries
+        let versions = file
+            .entries
             .iter()
             .filter(|(v_id, _)| *v_id.version() == version);
@@ -19,7 +19,7 @@ pub async fn get_package_versions(

     let (scope, name_part) = name.as_str();

-    let versions: IndexFile = {
+    let file: IndexFile = {
         let source = app_state.source.lock().await;
         let repo = gix::open(source.path(&app_state.project))?;
         let tree = root_tree(&repo)?;

@@ -32,7 +32,7 @@ pub async fn get_package_versions(

     let mut responses = BTreeMap::new();

-    for (v_id, entry) in versions {
+    for (v_id, entry) in file.entries {
         let info = responses
             .entry(v_id.version().clone())
             .or_insert_with(|| PackageResponse {
@@ -66,25 +66,6 @@ struct DocEntryInfo {
     collapsed: bool,
 }

-fn compare_repo_urls(this: &gix::Url, external: &gix::Url) -> bool {
-    let this = this.to_bstring().to_string().to_lowercase();
-    let external = external.to_bstring().to_string().to_lowercase();
-
-    let this = if this.ends_with(".git") {
-        &this[..this.len() - 4]
-    } else {
-        &this
-    };
-
-    let external = if external.ends_with(".git") {
-        &external[..external.len() - 4]
-    } else {
-        &external
-    };
-
-    this == external
-}
-
 pub async fn publish_package(
     app_state: web::Data<AppState>,
     bytes: Bytes,

@@ -321,8 +302,9 @@ pub async fn publish_package(
                     .index
                     .as_deref()
                     .filter(|index| match gix::Url::try_from(*index) {
-                        Ok(_) if config.other_registries_allowed => true,
-                        Ok(url) => compare_repo_urls(source.repo_url(), &url),
+                        Ok(url) => config
+                            .other_registries_allowed
+                            .is_allowed_or_same(source.repo_url().clone(), url),
                         Err(_) => false,
                    })
                    .is_none()
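Note: the removed compare_repo_urls helper normalized two git URLs by lowercasing them and stripping a trailing ".git" before comparing; the replacement delegates to an is_allowed_or_same method on the config type, which this diff does not show. A sketch of just the normalization the old helper performed, with hypothetical names:

// Hypothetical free function mirroring the removed helper's behaviour.
fn same_repo(a: &str, b: &str) -> bool {
    fn normalize(url: &str) -> String {
        let url = url.to_lowercase();
        url.strip_suffix(".git").map(str::to_string).unwrap_or(url)
    }
    normalize(a) == normalize(b)
}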
@@ -333,16 +315,13 @@ pub async fn publish_package(
                 }
             }
             DependencySpecifiers::Wally(specifier) => {
-                if !config.wally_allowed {
-                    return Err(Error::InvalidArchive(
-                        "wally dependencies are not allowed".into(),
-                    ));
-                }
-
                 if specifier
                     .index
-                    .as_ref()
-                    .filter(|index| index.parse::<url::Url>().is_ok())
+                    .as_deref()
+                    .filter(|index| match gix::Url::try_from(*index) {
+                        Ok(url) => config.wally_allowed.is_allowed(url),
+                        Err(_) => false,
+                    })
                     .is_none()
                 {
                     return Err(Error::InvalidArchive(format!(

@@ -350,15 +329,15 @@ pub async fn publish_package(
                     )));
                 }
             }
-            DependencySpecifiers::Git(_) => {
-                if !config.git_allowed {
+            DependencySpecifiers::Git(specifier) => {
+                if !config.git_allowed.is_allowed(specifier.repo.clone()) {
                     return Err(Error::InvalidArchive(
                         "git dependencies are not allowed".into(),
                     ));
                 }
             }
             DependencySpecifiers::Workspace(_) => {
-                // workspace specifiers are to be transformed into Pesde specifiers by the sender
+                // workspace specifiers are to be transformed into pesde specifiers by the sender
                 return Err(Error::InvalidArchive(
                     "non-transformed workspace dependency".into(),
                 ));
@@ -366,8 +345,9 @@ pub async fn publish_package(
         }
     }

-    let repo = source.repo_git2(&app_state.project)?;
+    let repo = Repository::open_bare(source.path(&app_state.project))?;
+    let gix_repo = gix::open(repo.path())?;

     let gix_tree = root_tree(&gix_repo)?;

     let (scope, name) = manifest.name.as_str();

@@ -391,7 +371,7 @@ pub async fn publish_package(
         }
     };

-    let mut entries: IndexFile =
+    let mut file: IndexFile =
         toml::de::from_str(&read_file(&gix_tree, [scope, name])?.unwrap_or_default())?;

     let new_entry = IndexFileEntry {

@@ -406,11 +386,12 @@ pub async fn publish_package(
         dependencies,
     };

-    let this_version = entries
+    let this_version = file
+        .entries
         .keys()
         .find(|v_id| *v_id.version() == manifest.version);
     if let Some(this_version) = this_version {
-        let other_entry = entries.get(this_version).unwrap();
+        let other_entry = file.entries.get(this_version).unwrap();

         // description cannot be different - which one to render in the "Recently published" list?
         // the others cannot be different because what to return from the versions endpoint?

@@ -426,7 +407,8 @@ pub async fn publish_package(
         }
     }

-    if entries
+    if file
+        .entries
         .insert(
             VersionId::new(manifest.version.clone(), manifest.target.kind()),
             new_entry.clone(),

@@ -442,7 +424,7 @@ pub async fn publish_package(
     let reference = repo.find_reference(&refspec)?;

     {
-        let index_content = toml::to_string(&entries)?;
+        let index_content = toml::to_string(&file)?;
         let mut blob_writer = repo.blob_writer(None)?;
         blob_writer.write_all(index_content.as_bytes())?;
         oids.push((name, blob_writer.commit()?));
@@ -68,10 +68,11 @@ pub async fn search_packages(
                 .unwrap();
             let (scope, name) = id.as_str();

-            let versions: IndexFile =
+            let file: IndexFile =
                 toml::de::from_str(&read_file(&tree, [scope, name]).unwrap().unwrap()).unwrap();

-            let (latest_version, entry) = versions
+            let (latest_version, entry) = file
+                .entries
                 .iter()
                 .max_by_key(|(v_id, _)| v_id.version())
                 .unwrap();

@@ -79,17 +80,19 @@ pub async fn search_packages(
             PackageResponse {
                 name: id.to_string(),
                 version: latest_version.version().to_string(),
-                targets: versions
+                targets: file
+                    .entries
                     .iter()
                     .filter(|(v_id, _)| v_id.version() == latest_version.version())
                     .map(|(_, entry)| (&entry.target).into())
                     .collect(),
                 description: entry.description.clone().unwrap_or_default(),
-                published_at: versions
+                published_at: file
+                    .entries
                     .values()
-                    .max_by_key(|entry| entry.published_at)
-                    .unwrap()
-                    .published_at,
+                    .map(|entry| entry.published_at)
+                    .max()
+                    .unwrap(),
                 license: entry.license.clone().unwrap_or_default(),
                 authors: entry.authors.clone(),
                 repository: entry.repository.clone().map(|url| url.to_string()),
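Note: the published_at change swaps max_by_key over whole entries for map-then-max on the timestamp alone, so only the copied timestamp survives the iterator rather than a borrow of the full entry. A tiny sketch of the equivalence with a made-up Entry type (the real field is a chrono timestamp; u64 here is an assumption):

struct Entry { published_at: u64 }

// Both forms compute the newest publish time; the diff moves to the second.
fn newest(entries: &[Entry]) -> Option<u64> {
    let a = entries.iter().max_by_key(|e| e.published_at).map(|e| e.published_at);
    let b = entries.iter().map(|e| e.published_at).max();
    debug_assert_eq!(a, b);
    b
}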
@@ -1,5 +1,4 @@
 use actix_web::{body::BoxBody, HttpResponse, ResponseError};
-use log::error;
 use pesde::source::git_index::errors::{ReadFile, RefreshError, TreeError};
 use serde::Serialize;
 use thiserror::Error;

@@ -67,7 +66,7 @@ impl ResponseError for Error {
                 error: format!("archive is invalid: {e}"),
             }),
             e => {
-                log::error!("unhandled error: {e:?}");
+                tracing::error!("unhandled error: {e:?}");
                 HttpResponse::InternalServerError().finish()
             }
         }
@@ -6,19 +6,22 @@ use crate::{
 use actix_cors::Cors;
 use actix_governor::{Governor, GovernorConfigBuilder};
 use actix_web::{
-    middleware::{from_fn, Compress, Logger, NormalizePath, TrailingSlash},
+    middleware::{from_fn, Compress, NormalizePath, TrailingSlash},
     rt::System,
     web,
     web::PayloadConfig,
     App, HttpServer,
 };
 use fs_err::tokio as fs;
-use log::info;
 use pesde::{
     source::{pesde::PesdePackageSource, traits::PackageSource},
     AuthConfig, Project,
 };
 use std::{env::current_dir, path::PathBuf};
+use tracing::level_filters::LevelFilter;
+use tracing_subscriber::{
+    fmt::format::FmtSpan, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter,
+};

 mod auth;
 mod endpoints;

@@ -116,12 +119,12 @@ async fn run() -> std::io::Result<()> {
     let app_data = web::Data::new(AppState {
         storage: {
             let storage = get_storage_from_env();
-            info!("storage: {storage}");
+            tracing::info!("storage: {storage}");
             storage
         },
         auth: {
             let auth = get_auth_from_env(&config);
-            info!("auth: {auth}");
+            tracing::info!("auth: {auth}");
             auth
         },
         source: tokio::sync::Mutex::new(source),

@@ -140,14 +143,12 @@ async fn run() -> std::io::Result<()> {
         .finish()
         .unwrap();

-    info!("listening on {address}:{port}");
-
     HttpServer::new(move || {
         App::new()
             .wrap(sentry_actix::Sentry::with_transaction())
             .wrap(NormalizePath::new(TrailingSlash::Trim))
             .wrap(Cors::permissive())
-            .wrap(Logger::default())
+            .wrap(tracing_actix_web::TracingLogger::default())
             .wrap(Compress::default())
             .app_data(app_data.clone())
             .route(

@@ -200,12 +201,26 @@ async fn run() -> std::io::Result<()> {
 fn main() -> std::io::Result<()> {
     let _ = dotenvy::dotenv();

-    let mut log_builder = pretty_env_logger::formatted_builder();
-    log_builder.parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"));
+    let tracing_env_filter = EnvFilter::builder()
+        .with_default_directive(LevelFilter::INFO.into())
+        .from_env_lossy()
+        .add_directive("reqwest=info".parse().unwrap())
+        .add_directive("rustls=info".parse().unwrap())
+        .add_directive("tokio_util=info".parse().unwrap())
+        .add_directive("goblin=info".parse().unwrap())
+        .add_directive("tower=info".parse().unwrap())
+        .add_directive("hyper=info".parse().unwrap())
+        .add_directive("h2=info".parse().unwrap());

-    let logger = sentry_log::SentryLogger::with_dest(log_builder.build());
-    log::set_boxed_logger(Box::new(logger)).unwrap();
-    log::set_max_level(log::LevelFilter::Info);
+    tracing_subscriber::registry()
+        .with(tracing_env_filter)
+        .with(
+            tracing_subscriber::fmt::layer()
+                .compact()
+                .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE),
+        )
+        .with(sentry::integrations::tracing::layer())
+        .init();

     let guard = sentry::init(sentry::ClientOptions {
         release: sentry::release_name!(),

@@ -218,9 +233,9 @@ fn main() -> std::io::Result<()> {

     if guard.is_enabled() {
         std::env::set_var("RUST_BACKTRACE", "full");
-        info!("sentry initialized");
+        tracing::info!("sentry initialized");
     } else {
-        info!("sentry **NOT** initialized");
+        tracing::info!("sentry **NOT** initialized");
     }

     System::new().block_on(run())
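Note: the new main wires tracing-subscriber in place of pretty_env_logger. A condensed sketch of that initialization pattern, mirroring the added code rather than documenting pesde's exact setup (directive list trimmed):

use tracing::level_filters::LevelFilter;
use tracing_subscriber::{
    fmt::format::FmtSpan, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter,
};

fn init_tracing() {
    // RUST_LOG-style filter with an INFO default and a quieted dependency.
    let filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::INFO.into())
        .from_env_lossy()
        .add_directive("hyper=info".parse().unwrap());

    tracing_subscriber::registry()
        .with(filter)
        .with(
            tracing_subscriber::fmt::layer()
                .compact()
                .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE),
        )
        .init();
}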
@@ -8,6 +8,8 @@ pub struct TargetInfo {
     kind: TargetKind,
     lib: bool,
     bin: bool,
+    #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+    scripts: BTreeSet<String>,
 }

 impl From<Target> for TargetInfo {

@@ -22,6 +24,10 @@ impl From<&Target> for TargetInfo {
             kind: target.kind(),
             lib: target.lib_path().is_some(),
             bin: target.bin_path().is_some(),
+            scripts: target
+                .scripts()
+                .map(|scripts| scripts.keys().cloned().collect())
+                .unwrap_or_default(),
         }
     }
 }
@@ -104,8 +104,8 @@ pub async fn make_search(
     pin!(stream);

     while let Some((pkg_name, mut file)) = stream.next().await {
-        let Some((_, latest_entry)) = file.pop_last() else {
-            log::warn!("no versions found for {pkg_name}");
+        let Some((_, latest_entry)) = file.entries.pop_last() else {
+            tracing::error!("no versions found for {pkg_name}");
             continue;
         };
@@ -5,6 +5,7 @@ use keyring::Entry;
 use reqwest::header::AUTHORIZATION;
 use serde::{ser::SerializeMap, Deserialize, Serialize};
 use std::collections::BTreeMap;
+use tracing::instrument;

 #[derive(Debug, Clone)]
 pub struct Tokens(pub BTreeMap<gix::Url, String>);

@@ -37,15 +38,20 @@ impl<'de> Deserialize<'de> for Tokens {
     }
 }

+#[instrument(level = "trace")]
 pub async fn get_tokens() -> anyhow::Result<Tokens> {
     let config = read_config().await?;
     if !config.tokens.0.is_empty() {
+        tracing::debug!("using tokens from config");
         return Ok(config.tokens);
     }

     match Entry::new("tokens", env!("CARGO_PKG_NAME")) {
         Ok(entry) => match entry.get_password() {
-            Ok(token) => return serde_json::from_str(&token).context("failed to parse tokens"),
+            Ok(token) => {
+                tracing::debug!("using tokens from keyring");
+                return serde_json::from_str(&token).context("failed to parse tokens");
+            }
             Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
             Err(e) => return Err(e.into()),
         },

@@ -56,16 +62,22 @@ pub async fn get_tokens() -> anyhow::Result<Tokens> {
     Ok(Tokens(BTreeMap::new()))
 }

+#[instrument(level = "trace")]
 pub async fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
     let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
     let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?;

     match entry.set_password(&json) {
-        Ok(()) => return Ok(()),
+        Ok(()) => {
+            tracing::debug!("tokens saved to keyring");
+            return Ok(());
+        }
         Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
         Err(e) => return Err(e.into()),
     }

+    tracing::debug!("tokens saved to config");
+
     let mut config = read_config().await?;
     config.tokens = tokens;
     write_config(&config).await.map_err(Into::into)

@@ -86,6 +98,7 @@ struct UserResponse {
     login: String,
 }

+#[instrument(level = "trace")]
 pub async fn get_token_login(
     reqwest: &reqwest::Client,
     access_token: &str,
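Note: get_tokens and set_tokens prefer the OS keyring and fall back to the config file, treating PlatformFailure and NoEntry as "keyring unavailable" rather than hard errors. A minimal sketch of that fallback shape; the "pesde" service name is illustrative:

use keyring::Entry;

// Returns Ok(None) when the keyring cannot be used, so the caller can
// fall back to the config file instead of failing outright.
fn load_secret() -> anyhow::Result<Option<String>> {
    let entry = Entry::new("tokens", "pesde")?;
    match entry.get_password() {
        Ok(secret) => Ok(Some(secret)),
        Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => Ok(None),
        Err(e) => Err(e.into()),
    }
}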
@@ -1,7 +1,8 @@
-use std::str::FromStr;
+use std::{collections::HashSet, str::FromStr};
+
 use anyhow::Context;
 use clap::Args;
 use colored::Colorize;
 use semver::VersionReq;

 use crate::cli::{config::read_config, AnyPackageIdentifier, VersionedPackageName};

@@ -62,7 +63,7 @@ impl AddCommand {
                     .cloned();

                 if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
-                    log::error!("index {index} not found");
+                    println!("{}: index {index} not found", "error".red().bold());
                     return Ok(());
                 }

@@ -89,7 +90,7 @@ impl AddCommand {
                     .cloned();

                 if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
-                    log::error!("wally index {index} not found");
+                    println!("{}: wally index {index} not found", "error".red().bold());
                     return Ok(());
                 }

@@ -133,14 +134,19 @@ impl AddCommand {
             .context("failed to refresh package source")?;

         let Some(version_id) = source
-            .resolve(&specifier, &project, manifest.target.kind())
+            .resolve(
+                &specifier,
+                &project,
+                manifest.target.kind(),
+                &mut HashSet::new(),
+            )
             .await
             .context("failed to resolve package")?
             .1
             .pop_last()
             .map(|(v_id, _)| v_id)
         else {
-            log::error!("no versions found for package {specifier}");
+            println!("{}: no versions found for package", "error".red().bold());

            return Ok(());
        };
@@ -4,6 +4,7 @@ use pesde::{errors::ManifestReadError, Project, DEFAULT_INDEX_NAME};

 mod login;
 mod logout;
+mod token;
 mod whoami;

 #[derive(Debug, Args)]

@@ -25,6 +26,8 @@ pub enum AuthCommands {
     /// Prints the username of the currently logged-in user
     #[clap(name = "whoami")]
     WhoAmI(whoami::WhoAmICommand),
+    /// Prints the token for an index
+    Token(token::TokenCommand),
 }

 impl AuthSubcommand {

@@ -64,6 +67,7 @@ impl AuthSubcommand {
             AuthCommands::Login(login) => login.run(index_url, project, reqwest).await,
             AuthCommands::Logout(logout) => logout.run(index_url).await,
             AuthCommands::WhoAmI(whoami) => whoami.run(index_url, reqwest).await,
+            AuthCommands::Token(token) => token.run(index_url).await,
         }
     }
 }
22 src/cli/commands/auth/token.rs (new file)

@@ -0,0 +1,22 @@
+use crate::cli::auth::get_tokens;
+use clap::Args;
+
+#[derive(Debug, Args)]
+pub struct TokenCommand {}
+
+impl TokenCommand {
+    pub async fn run(self, index_url: gix::Url) -> anyhow::Result<()> {
+        let tokens = get_tokens().await?;
+        let token = match tokens.0.get(&index_url) {
+            Some(token) => token,
+            None => {
+                println!("not logged in into {index_url}");
+                return Ok(());
+            }
+        };
+
+        println!("token for {index_url}: \"{token}\"");
+
+        Ok(())
+    }
+}
@@ -1,22 +1,17 @@
 use clap::Subcommand;

 mod default_index;
-mod scripts_repo;

 #[derive(Debug, Subcommand)]
 pub enum ConfigCommands {
     /// Configuration for the default index
     DefaultIndex(default_index::DefaultIndexCommand),
-
-    /// Configuration for the scripts repository
-    ScriptsRepo(scripts_repo::ScriptsRepoCommand),
 }

 impl ConfigCommands {
     pub async fn run(self) -> anyhow::Result<()> {
         match self {
             ConfigCommands::DefaultIndex(default_index) => default_index.run().await,
-            ConfigCommands::ScriptsRepo(scripts_repo) => scripts_repo.run().await,
         }
     }
 }
||||
|
|
|
@@ -1,48 +0,0 @@
-use crate::cli::{
-    config::{read_config, write_config, CliConfig},
-    home_dir,
-};
-use anyhow::Context;
-use clap::Args;
-use fs_err::tokio as fs;
-
-#[derive(Debug, Args)]
-pub struct ScriptsRepoCommand {
-    /// The new repo URL to set as default, don't pass any value to check the current default repo
-    #[arg(index = 1, value_parser = crate::cli::parse_gix_url)]
-    repo: Option<gix::Url>,
-
-    /// Resets the default repo to the default value
-    #[arg(short, long, conflicts_with = "repo")]
-    reset: bool,
-}
-
-impl ScriptsRepoCommand {
-    pub async fn run(self) -> anyhow::Result<()> {
-        let mut config = read_config().await?;
-
-        let repo = if self.reset {
-            Some(CliConfig::default().scripts_repo)
-        } else {
-            self.repo
-        };
-
-        match repo {
-            Some(repo) => {
-                config.scripts_repo = repo.clone();
-                write_config(&config).await?;
-
-                fs::remove_dir_all(home_dir()?.join("scripts"))
-                    .await
-                    .context("failed to remove scripts directory")?;
-
-                println!("scripts repo set to: {repo}");
-            }
-            None => {
-                println!("current scripts repo: {}", config.scripts_repo);
-            }
-        }
-
-        Ok(())
-    }
-}
@@ -1,4 +1,4 @@
-use crate::cli::{config::read_config, VersionedPackageName};
+use crate::cli::{config::read_config, progress_bar, VersionedPackageName};
 use anyhow::Context;
 use clap::Args;
 use fs_err::tokio as fs;

@@ -13,7 +13,10 @@ use pesde::{
     Project,
 };
 use semver::VersionReq;
-use std::{env::current_dir, ffi::OsString, io::Write, process::Command};
+use std::{
+    collections::HashSet, env::current_dir, ffi::OsString, io::Write, process::Command, sync::Arc,
+};
+use tokio::sync::Mutex;

 #[derive(Debug, Args)]
 pub struct ExecuteCommand {

@@ -53,7 +56,7 @@ impl ExecuteCommand {
         };

         if let Some(res) = source
-            .resolve(&specifier, &project, TargetKind::Lune)
+            .resolve(&specifier, &project, TargetKind::Lune, &mut HashSet::new())
             .await
             .context("failed to resolve package")?
             .1

@@ -63,7 +66,7 @@ impl ExecuteCommand {
         }

         source
-            .resolve(&specifier, &project, TargetKind::Luau)
+            .resolve(&specifier, &project, TargetKind::Luau, &mut HashSet::new())
             .await
             .context("failed to resolve package")?
             .1

@@ -75,7 +78,22 @@ impl ExecuteCommand {
             );
         };

-        log::info!("found package {}@{version}", pkg_ref.name);
+        println!("using {}@{version}", pkg_ref.name);
+
+        let tmp_dir = project.cas_dir().join(".tmp");
+        fs::create_dir_all(&tmp_dir)
+            .await
+            .context("failed to create temporary directory")?;
+        let tempdir =
+            tempfile::tempdir_in(tmp_dir).context("failed to create temporary directory")?;
+
+        let project = Project::new(
+            tempdir.path(),
+            None::<std::path::PathBuf>,
+            project.data_dir(),
+            project.cas_dir(),
+            project.auth_config().clone(),
+        );

         let (fs, target) = source
             .download(&pkg_ref, &project, &reqwest)

@@ -83,18 +101,43 @@ impl ExecuteCommand {
             .context("failed to download package")?;
         let bin_path = target.bin_path().context("package has no binary export")?;

-        let tmp_dir = project.cas_dir().join(".tmp");
-        fs::create_dir_all(&tmp_dir)
-            .await
-            .context("failed to create temporary directory")?;
-
-        let tempdir =
-            tempfile::tempdir_in(tmp_dir).context("failed to create temporary directory")?;
-
         fs.write_to(tempdir.path(), project.cas_dir(), true)
             .await
             .context("failed to write package contents")?;

+        let mut refreshed_sources = HashSet::new();
+
+        let graph = project
+            .dependency_graph(None, &mut refreshed_sources, true)
+            .await
+            .context("failed to build dependency graph")?;
+        let graph = Arc::new(graph);
+
+        let (rx, downloaded_graph) = project
+            .download_and_link(
+                &graph,
+                &Arc::new(Mutex::new(refreshed_sources)),
+                &reqwest,
+                true,
+                true,
+                |_| async { Ok::<_, std::io::Error>(()) },
+            )
+            .await
+            .context("failed to download dependencies")?;
+
+        progress_bar(
+            graph.values().map(|versions| versions.len() as u64).sum(),
+            rx,
+            "📥 ".to_string(),
+            "downloading dependencies".to_string(),
+            "downloaded dependencies".to_string(),
+        )
+        .await?;
+
+        downloaded_graph
+            .await
+            .context("failed to download & link dependencies")?;
+
         let mut caller =
             tempfile::NamedTempFile::new_in(tempdir.path()).context("failed to create tempfile")?;
         caller
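Note: execute now builds a dependency graph and drives download_and_link, which returns a progress receiver plus a future; the CLI drains the progress channel and then awaits the linking future. A hedged sketch of consuming such a two-stage API; the channel and future types are stand-ins, not pesde's real signatures:

// Drains progress updates, then awaits the completion future.
async fn drive(
    mut rx: tokio::sync::mpsc::Receiver<()>,
    done: impl std::future::Future<Output = anyhow::Result<()>>,
) -> anyhow::Result<()> {
    while let Some(()) = rx.recv().await {
        // tick a progress bar here
    }
    done.await
}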
@@ -1,28 +1,39 @@
-use std::{path::Path, str::FromStr};
-
+use crate::cli::config::read_config;
 use anyhow::Context;
 use clap::Args;
 use colored::Colorize;
 use inquire::validator::Validation;
 use pesde::{
-    errors::ManifestReadError, names::PackageName, scripts::ScriptName, Project, DEFAULT_INDEX_NAME,
+    errors::ManifestReadError,
+    manifest::{target::TargetKind, DependencyType},
+    names::PackageName,
+    source::{
+        git_index::GitBasedSource,
+        pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
+        specifiers::DependencySpecifiers,
+        traits::PackageSource,
+    },
+    Project, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER,
 };
-
-use crate::cli::{config::read_config, HOME_DIR};
+use fs_err::tokio as fs;
+use semver::VersionReq;
+use std::{collections::HashSet, fmt::Display, str::FromStr};

 #[derive(Debug, Args)]
 pub struct InitCommand {}

-fn script_contents(path: &Path) -> String {
-    format!(
-        r#"local process = require("@lune/process")
-local home_dir = if process.os == "windows" then process.env.userprofile else process.env.HOME
-
-require(home_dir .. {:?})"#,
-        format!("/{HOME_DIR}/scripts/{}", path.display())
-    )
-}
+#[derive(Debug)]
+enum PackageNameOrCustom {
+    PackageName(PackageName),
+    Custom,
+}
+
+impl Display for PackageNameOrCustom {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            PackageNameOrCustom::PackageName(n) => write!(f, "{n}"),
+            PackageNameOrCustom::Custom => write!(f, "custom"),
+        }
+    }
+}

@@ -39,7 +50,7 @@ impl InitCommand {
         let mut manifest = toml_edit::DocumentMut::new();

         manifest["name"] = toml_edit::value(
-            inquire::Text::new("What is the name of the project?")
+            inquire::Text::new("what is the name of the project?")
                 .with_validator(|name: &str| {
                     Ok(match PackageName::from_str(name) {
                         Ok(_) => Validation::Valid,

@@ -51,20 +62,19 @@ impl InitCommand {
         );
         manifest["version"] = toml_edit::value("0.1.0");

-        let description =
-            inquire::Text::new("What is the description of the project? (leave empty for none)")
-                .prompt()
-                .unwrap();
+        let description = inquire::Text::new("what is the description of the project?")
+            .with_help_message("a short description of the project. leave empty for none")
+            .prompt()
+            .unwrap();

         if !description.is_empty() {
             manifest["description"] = toml_edit::value(description);
         }

-        let authors = inquire::Text::new(
-            "Who are the authors of this project? (leave empty for none, comma separated)",
-        )
-        .prompt()
-        .unwrap();
+        let authors = inquire::Text::new("who are the authors of this project?")
+            .with_help_message("comma separated list. leave empty for none")
+            .prompt()
+            .unwrap();

         let authors = authors
             .split(',')

@@ -76,106 +86,177 @@ impl InitCommand {
             manifest["authors"] = toml_edit::value(authors);
         }

-        let repo = inquire::Text::new(
-            "What is the repository URL of this project? (leave empty for none)",
-        )
-        .with_validator(|repo: &str| {
-            if repo.is_empty() {
-                return Ok(Validation::Valid);
-            }
-
-            Ok(match url::Url::parse(repo) {
-                Ok(_) => Validation::Valid,
-                Err(e) => Validation::Invalid(e.to_string().into()),
-            })
-        })
-        .prompt()
-        .unwrap();
+        let repo = inquire::Text::new("what is the repository URL of this project?")
+            .with_validator(|repo: &str| {
+                if repo.is_empty() {
+                    return Ok(Validation::Valid);
+                }
+
+                Ok(match url::Url::parse(repo) {
+                    Ok(_) => Validation::Valid,
+                    Err(e) => Validation::Invalid(e.to_string().into()),
+                })
+            })
+            .with_help_message("leave empty for none")
+            .prompt()
+            .unwrap();
         if !repo.is_empty() {
             manifest["repository"] = toml_edit::value(repo);
         }

-        let license =
-            inquire::Text::new("What is the license of this project? (leave empty for none)")
-                .with_initial_value("MIT")
-                .prompt()
-                .unwrap();
+        let license = inquire::Text::new("what is the license of this project?")
+            .with_initial_value("MIT")
+            .with_help_message("an SPDX license identifier. leave empty for none")
+            .prompt()
+            .unwrap();
         if !license.is_empty() {
             manifest["license"] = toml_edit::value(license);
         }

         let target_env = inquire::Select::new(
-            "What environment are you targeting for your package?",
-            vec!["roblox", "roblox_server", "lune", "luau"],
+            "what environment are you targeting for your package?",
+            TargetKind::VARIANTS.to_vec(),
         )
         .prompt()
         .unwrap();

         manifest["target"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
-            ["environment"] = toml_edit::value(target_env);
+            ["environment"] = toml_edit::value(target_env.to_string());

-        if target_env == "roblox"
-            || target_env == "roblox_server"
-            || inquire::Confirm::new(&format!(
-                "Would you like to setup a default {} script?",
-                ScriptName::RobloxSyncConfigGenerator
-            ))
-            .prompt()
+        let source = PesdePackageSource::new(read_config().await?.default_index);
+
+        manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
+            [DEFAULT_INDEX_NAME] = toml_edit::value(source.repo_url().to_bstring().to_string());
+
+        if target_env.is_roblox()
+            || inquire::prompt_confirmation(
+                "would you like to setup default Roblox compatibility scripts?",
+            )
             .unwrap()
         {
-            let folder = project
-                .package_dir()
-                .join(concat!(".", env!("CARGO_PKG_NAME")));
-            fs::create_dir_all(&folder)
+            PackageSource::refresh(&source, &project)
                 .await
-                .context("failed to create scripts folder")?;
+                .context("failed to refresh package source")?;
+            let config = source
+                .config(&project)
+                .await
+                .context("failed to get source config")?;

-            fs::write(
-                folder.join(format!("{}.luau", ScriptName::RobloxSyncConfigGenerator)),
-                script_contents(Path::new(&format!(
-                    "lune/rojo/{}.luau",
-                    ScriptName::RobloxSyncConfigGenerator
-                ))),
-            )
-            .await
-            .context("failed to write sync config generator script file")?;
+            let scripts_package = if config.scripts_packages.is_empty() {
+                PackageNameOrCustom::Custom
+            } else {
+                inquire::Select::new(
+                    "which scripts package do you want to use?",
+                    config
+                        .scripts_packages
+                        .into_iter()
+                        .map(PackageNameOrCustom::PackageName)
+                        .chain(std::iter::once(PackageNameOrCustom::Custom))
+                        .collect(),
+                )
+                .prompt()
+                .unwrap()
+            };

-            #[cfg(feature = "wally-compat")]
-            fs::write(
-                folder.join(format!("{}.luau", ScriptName::SourcemapGenerator)),
-                script_contents(Path::new(&format!(
-                    "lune/rojo/{}.luau",
-                    ScriptName::SourcemapGenerator
-                ))),
-            )
-            .await
-            .context("failed to write sourcemap generator script file")?;
+            let scripts_package = match scripts_package {
+                PackageNameOrCustom::PackageName(p) => Some(p),
+                PackageNameOrCustom::Custom => {
+                    let name = inquire::Text::new("which scripts package to use?")
+                        .with_validator(|name: &str| {
+                            if name.is_empty() {
+                                return Ok(Validation::Valid);
+                            }
+
+                            Ok(match PackageName::from_str(name) {
+                                Ok(_) => Validation::Valid,
+                                Err(e) => Validation::Invalid(e.to_string().into()),
+                            })
+                        })
+                        .with_help_message("leave empty for none")
+                        .prompt()
+                        .unwrap();
+
+                    if name.is_empty() {
+                        None
+                    } else {
+                        Some(PackageName::from_str(&name).unwrap())
+                    }
+                }
+            };

-            let scripts =
-                manifest["scripts"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
-
-            scripts[&ScriptName::RobloxSyncConfigGenerator.to_string()] =
-                toml_edit::value(format!(
-                    concat!(".", env!("CARGO_PKG_NAME"), "/{}.luau"),
-                    ScriptName::RobloxSyncConfigGenerator
-                ));
-
-            #[cfg(feature = "wally-compat")]
-            {
-                scripts[&ScriptName::SourcemapGenerator.to_string()] = toml_edit::value(format!(
-                    concat!(".", env!("CARGO_PKG_NAME"), "/{}.luau"),
-                    ScriptName::SourcemapGenerator
-                ));
+            if let Some(scripts_pkg_name) = scripts_package {
+                let (v_id, pkg_ref) = source
+                    .resolve(
+                        &PesdeDependencySpecifier {
+                            name: scripts_pkg_name,
+                            version: VersionReq::STAR,
+                            index: None,
+                            target: None,
+                        },
+                        &project,
+                        TargetKind::Lune,
+                        &mut HashSet::new(),
+                    )
+                    .await
+                    .context("failed to resolve scripts package")?
+                    .1
+                    .pop_last()
+                    .context("scripts package not found")?;
+
+                let Some(scripts) = pkg_ref.target.scripts().filter(|s| !s.is_empty()) else {
+                    anyhow::bail!("scripts package has no scripts. this is an issue with the index")
+                };
+
+                let scripts_field = &mut manifest["scripts"]
+                    .or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
+
+                for script_name in scripts.keys() {
+                    scripts_field[script_name] = toml_edit::value(format!(
+                        "{SCRIPTS_LINK_FOLDER}/scripts/{script_name}.luau"
+                    ));
+                }
+
+                let dev_deps = &mut manifest["dev_dependencies"]
+                    .or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
+
+                let field = &mut dev_deps["scripts"];
+                field["name"] = toml_edit::value(pkg_ref.name.to_string());
+                field["version"] = toml_edit::value(format!("^{}", v_id.version()));
+                field["target"] = toml_edit::value(v_id.target().to_string());
+
+                for (alias, (spec, ty)) in pkg_ref.dependencies {
+                    if ty != DependencyType::Peer {
+                        continue;
+                    }
+
+                    let DependencySpecifiers::Pesde(spec) = spec else {
+                        continue;
+                    };
+
+                    let field = &mut dev_deps[alias];
+                    field["name"] = toml_edit::value(spec.name.to_string());
+                    field["version"] = toml_edit::value(spec.version.to_string());
+                    field["target"] =
+                        toml_edit::value(spec.target.unwrap_or_else(|| *v_id.target()).to_string());
+                }
+            } else {
+                println!(
+                    "{}",
+                    "no scripts package configured, this can cause issues with Roblox compatibility".red()
+                );
+                if !inquire::prompt_confirmation("initialize regardless?").unwrap() {
+                    return Ok(());
+                }
             }
         }

-        manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
-            [DEFAULT_INDEX_NAME] =
-            toml_edit::value(read_config().await?.default_index.to_bstring().to_string());
-
         project.write_manifest(manifest.to_string()).await?;

-        println!("{}", "initialized project".green());
+        println!(
+            "{}\n{}: run `install` to fully finish setup",
+            "initialized project".green(),
+            "tip".cyan().bold()
+        );
         Ok(())
     }
 }
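Note: init resolves the scripts package with VersionReq::STAR and then takes pop_last() from the resolved map, which yields the newest matching version assuming the resolver's map is keyed in ascending version order, as a BTreeMap is. A sketch under that assumption:

use std::collections::BTreeMap;

// pop_last() removes and returns the greatest key, i.e. the newest version.
fn newest<V>(mut versions: BTreeMap<semver::Version, V>) -> Option<(semver::Version, V)> {
    versions.pop_last()
}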
@@ -1,22 +1,20 @@
 use crate::cli::{
-    bin_dir, files::make_executable, progress_bar, repos::update_scripts, run_on_workspace_members,
-    up_to_date_lockfile,
+    bin_dir, files::make_executable, progress_bar, run_on_workspace_members, up_to_date_lockfile,
 };
 use anyhow::Context;
 use clap::Args;
 use colored::{ColoredString, Colorize};
 use fs_err::tokio as fs;
 use futures::future::try_join_all;
-use indicatif::MultiProgress;
 use pesde::{
-    lockfile::Lockfile,
-    manifest::{target::TargetKind, DependencyType},
-    Project, MANIFEST_FILE_NAME,
+    download_and_link::filter_graph, lockfile::Lockfile, manifest::target::TargetKind, Project,
+    MANIFEST_FILE_NAME,
 };
 use std::{
     collections::{BTreeSet, HashMap, HashSet},
     sync::Arc,
 };
 use tokio::sync::Mutex;

 #[derive(Debug, Args, Copy, Clone)]
 pub struct InstallCommand {

@@ -46,12 +44,8 @@ fn bin_link_file(alias: &str) -> String {
         .collect::<Vec<_>>()
         .join(", ");

-    #[cfg(not(unix))]
-    let prefix = String::new();
-    #[cfg(unix)]
-    let prefix = "#!/usr/bin/env -S lune run\n";
     format!(
-        r#"{prefix}local process = require("@lune/process")
+        r#"local process = require("@lune/process")
local fs = require("@lune/fs")
local stdio = require("@lune/stdio")

@@ -81,21 +75,20 @@ stdio.ewrite(stdio.color("red") .. "binary `{alias}` not found. are you in the r
 }

 #[cfg(feature = "patches")]
 const JOBS: u8 = 6;
-#[cfg(not(feature = "patches"))]
-const JOBS: u8 = 5;
+#[cfg(not(feature = "patches"))]
+const JOBS: u8 = 4;

 fn job(n: u8) -> ColoredString {
     format!("[{n}/{JOBS}]").dimmed().bold()
 }

+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+struct CallbackError(#[from] anyhow::Error);
+
 impl InstallCommand {
-    pub async fn run(
-        self,
-        project: Project,
-        multi: MultiProgress,
-        reqwest: reqwest::Client,
-    ) -> anyhow::Result<()> {
+    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
         let mut refreshed_sources = HashSet::new();

         let manifest = project

@@ -117,10 +110,10 @@ impl InstallCommand {
         match project.deser_lockfile().await {
             Ok(lockfile) => {
                 if lockfile.overrides != manifest.overrides {
-                    log::debug!("overrides are different");
+                    tracing::debug!("overrides are different");
                     None
                 } else if lockfile.target != manifest.target.kind() {
-                    log::debug!("target kind is different");
+                    tracing::debug!("target kind is different");
                     None
                 } else {
                     Some(lockfile)

@@ -135,9 +128,6 @@ impl InstallCommand {
             }
         };

-        let project_2 = project.clone();
-        let update_scripts_handle = tokio::spawn(async move { update_scripts(&project_2).await });
-
         println!(
             "\n{}\n",
             format!("[now installing {} {}]", manifest.name, manifest.target)

@@ -157,7 +147,7 @@ impl InstallCommand {
                 deleted_folders
                     .entry(folder.to_string())
                     .or_insert_with(|| async move {
-                        log::debug!("deleting the {folder} folder");
+                        tracing::debug!("deleting the {folder} folder");

                         if let Some(e) = fs::remove_dir_all(package_dir.join(&folder))
                             .await

@@ -195,125 +185,124 @@ impl InstallCommand {
         println!("{} 📦 building dependency graph", job(2));

         let graph = project
-            .dependency_graph(old_graph.as_ref(), &mut refreshed_sources)
+            .dependency_graph(old_graph.as_ref(), &mut refreshed_sources, false)
             .await
             .context("failed to build dependency graph")?;
+        let graph = Arc::new(graph);

-        update_scripts_handle.await??;
+        let bin_folder = bin_dir().await?;

         let downloaded_graph = {
             let (rx, downloaded_graph) = project
-                .download_graph(&graph, &mut refreshed_sources, &reqwest, self.prod, true)
+                .download_and_link(
+                    &graph,
+                    &Arc::new(Mutex::new(refreshed_sources)),
+                    &reqwest,
+                    self.prod,
+                    true,
+                    |graph| {
+                        let graph = graph.clone();
+
+                        async move {
+                            try_join_all(
+                                graph
+                                    .values()
+                                    .flat_map(|versions| versions.values())
+                                    .filter(|node| node.target.bin_path().is_some())
+                                    .filter_map(|node| node.node.direct.as_ref())
+                                    .map(|(alias, _, _)| alias)
+                                    .filter(|alias| {
+                                        if *alias == env!("CARGO_BIN_NAME") {
+                                            tracing::warn!(
+                                                "package {alias} has the same name as the CLI, skipping bin link"
+                                            );
+                                            return false;
+                                        }
+
+                                        true
+                                    })
+                                    .map(|alias| {
+                                        let bin_folder = bin_folder.clone();
+                                        async move {
+                                            let bin_exec_file = bin_folder.join(alias).with_extension(std::env::consts::EXE_EXTENSION);
+
+                                            let impl_folder = bin_folder.join(".impl");
+                                            fs::create_dir_all(&impl_folder).await.context("failed to create bin link folder")?;
+
+                                            let bin_file = impl_folder.join(alias).with_extension("luau");
+                                            fs::write(&bin_file, bin_link_file(alias))
+                                                .await
+                                                .context("failed to write bin link file")?;
+
+                                            #[cfg(windows)]
+                                            {
+                                                fs::copy(
+                                                    std::env::current_exe()
+                                                        .context("failed to get current executable path")?,
+                                                    &bin_exec_file,
+                                                )
+                                                .await
+                                                .context("failed to copy bin link file")?;
+                                            }
+
+                                            #[cfg(not(windows))]
+                                            {
+                                                fs::write(
+                                                    &bin_exec_file,
+                                                    format!(r#"#!/bin/sh
+exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
+                                                    ),
+                                                )
+                                                .await
+                                                .context("failed to link bin link file")?;
+                                            }
+
+                                            make_executable(&bin_exec_file).await.context("failed to make bin link file executable")?;
+
+                                            Ok::<_, CallbackError>(())
+                                        }
+                                    }),
+                            )
+                            .await
+                            .map(|_| ())
+                        }
+                    },
+                )
                 .await
                 .context("failed to download dependencies")?;

             progress_bar(
                 graph.values().map(|versions| versions.len() as u64).sum(),
                 rx,
-                &multi,
                 format!("{} 📥 ", job(3)),
                 "downloading dependencies".to_string(),
                 "downloaded dependencies".to_string(),
             )
             .await?;

-            Arc::into_inner(downloaded_graph)
-                .unwrap()
-                .into_inner()
-                .unwrap()
-        };
-
-        let filtered_graph = if self.prod {
-            downloaded_graph
-                .clone()
-                .into_iter()
-                .map(|(n, v)| {
-                    (
-                        n,
-                        v.into_iter()
-                            .filter(|(_, n)| n.node.ty != DependencyType::Dev)
-                            .collect(),
-                    )
-                })
-                .collect()
-        } else {
-            downloaded_graph.clone()
+            downloaded_graph
+                .await
+                .context("failed to download & link dependencies")?
         };

         #[cfg(feature = "patches")]
         {
             let rx = project
-                .apply_patches(&filtered_graph)
+                .apply_patches(&filter_graph(&downloaded_graph, self.prod))
                 .await
                 .context("failed to apply patches")?;

             progress_bar(
                 manifest.patches.values().map(|v| v.len() as u64).sum(),
                 rx,
-                &multi,
-                format!("{} 🩹 ", job(4)),
+                format!("{} 🩹 ", job(JOBS - 1)),
                 "applying patches".to_string(),
                 "applied patches".to_string(),
             )
             .await?;
         }

-        println!("{} 🗺️ linking dependencies", job(JOBS - 1));
-
-        let bin_folder = bin_dir().await?;
-
-        try_join_all(
-            filtered_graph
-                .values()
-                .flat_map(|versions| versions.values())
-                .filter(|node| node.target.bin_path().is_some())
-                .filter_map(|node| node.node.direct.as_ref())
-                .map(|(alias, _)| alias)
-                .filter(|alias| {
-                    if *alias == env!("CARGO_BIN_NAME") {
-                        log::warn!(
-                            "package {alias} has the same name as the CLI, skipping bin link"
-                        );
-                        return false;
-                    }
-
-                    true
-                })
-                .map(|alias| {
-                    let bin_folder = bin_folder.clone();
-                    async move {
-                        let bin_file = bin_folder.join(alias);
-                        fs::write(&bin_file, bin_link_file(alias))
-                            .await
-                            .context("failed to write bin link file")?;
-
-                        make_executable(&bin_file)
-                            .await
-                            .context("failed to make bin link executable")?;
-
-                        #[cfg(windows)]
-                        {
-                            let bin_file = bin_file.with_extension(std::env::consts::EXE_EXTENSION);
-                            fs::copy(
-                                std::env::current_exe()
-                                    .context("failed to get current executable path")?,
-                                &bin_file,
-                            )
-                            .await
-                            .context("failed to copy bin link file")?;
-                        }
-
-                        Ok::<_, anyhow::Error>(())
-                    }
-                }),
-        )
-        .await?;
-
-        project
-            .link_dependencies(&filtered_graph)
-            .await
-            .context("failed to link dependencies")?;
-
         println!("{} 🧹 finishing up", job(JOBS));

         project

@@ -326,9 +315,8 @@ impl InstallCommand {
             graph: downloaded_graph,

             workspace: run_on_workspace_members(&project, |project| {
-                let multi = multi.clone();
                 let reqwest = reqwest.clone();
-                async move { Box::pin(self.run(project, multi, reqwest)).await }
+                async move { Box::pin(self.run(project, reqwest)).await }
             })
            .await?,
        })
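Note: the bin links written during install are now two files per alias: a Luau entry point under .impl/ plus a launcher, which is a copy of the pesde executable on Windows and a shell shim elsewhere. The shim, extracted as a sketch from the diff above:

// Builds the Unix launcher contents for a given alias; the .impl/ layout
// is taken from the install code shown in this diff.
fn shell_shim(alias: &str) -> String {
    format!(
        "#!/bin/sh\nexec lune run \"$(dirname \"$0\")/.impl/{alias}.luau\" -- \"$@\""
    )
}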
@@ -1,4 +1,3 @@
-use indicatif::MultiProgress;
 use pesde::Project;

 mod add;

@@ -72,18 +71,13 @@ pub enum Subcommand {
 }

 impl Subcommand {
-    pub async fn run(
-        self,
-        project: Project,
-        multi: MultiProgress,
-        reqwest: reqwest::Client,
-    ) -> anyhow::Result<()> {
+    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
         match self {
             Subcommand::Auth(auth) => auth.run(project, reqwest).await,
             Subcommand::Config(config) => config.run().await,
             Subcommand::Init(init) => init.run(project).await,
             Subcommand::Run(run) => run.run(project).await,
-            Subcommand::Install(install) => install.run(project, multi, reqwest).await,
+            Subcommand::Install(install) => install.run(project, reqwest).await,
             Subcommand::Publish(publish) => publish.run(project, reqwest).await,
             #[cfg(feature = "version-management")]
             Subcommand::SelfInstall(self_install) => self_install.run().await,

@@ -94,7 +88,7 @@ impl Subcommand {
             #[cfg(feature = "version-management")]
             Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await,
             Subcommand::Add(add) => add.run(project).await,
-            Subcommand::Update(update) => update.run(project, multi, reqwest).await,
+            Subcommand::Update(update) => update.run(project, reqwest).await,
             Subcommand::Outdated(outdated) => outdated.run(project).await,
             Subcommand::Execute(execute) => execute.run(project, reqwest).await,
         }
@@ -1,11 +1,7 @@
-use std::collections::HashSet;
-
-use crate::cli::up_to_date_lockfile;
 use anyhow::Context;
 use clap::Args;
 use futures::future::try_join_all;
-use semver::VersionReq;
-
+use crate::cli::up_to_date_lockfile;
 use pesde::{
     refresh_sources,
     source::{

@@ -15,6 +11,9 @@ use pesde::{
     },
     Project,
 };
+use semver::VersionReq;
+use std::{collections::HashSet, sync::Arc};
+use tokio::sync::Mutex;

 #[derive(Debug, Args)]
 pub struct OutdatedCommand {

@@ -53,22 +52,25 @@ impl OutdatedCommand {
         )
         .await?;

-        try_join_all(
+        let refreshed_sources = Arc::new(Mutex::new(refreshed_sources));
+
+        if try_join_all(
             graph
                 .into_iter()
                 .flat_map(|(_, versions)| versions.into_iter())
                 .map(|(current_version_id, node)| {
                     let project = project.clone();
+                    let refreshed_sources = refreshed_sources.clone();
                     async move {
-                        let Some((alias, mut specifier)) = node.node.direct else {
-                            return Ok::<(), anyhow::Error>(());
+                        let Some((alias, mut specifier, _)) = node.node.direct else {
+                            return Ok::<bool, anyhow::Error>(true);
                         };

                         if matches!(
                             specifier,
                             DependencySpecifiers::Git(_) | DependencySpecifiers::Workspace(_)
                         ) {
-                            return Ok(());
+                            return Ok(true);
                         }

                         let source = node.node.pkg_ref.source();

@@ -88,7 +90,12 @@ impl OutdatedCommand {
                         }

                         let version_id = source
-                            .resolve(&specifier, &project, manifest_target_kind)
+                            .resolve(
+                                &specifier,
+                                &project,
+                                manifest_target_kind,
+                                &mut *refreshed_sources.lock().await,
+                            )
                             .await
                             .context("failed to resolve package versions")?
                             .1

@@ -109,13 +116,20 @@ impl OutdatedCommand {
                                 current_version_id.version(),
                                 version_id.version()
                             );
+
+                            return Ok(false);
                         }

-                        Ok(())
+                        Ok(true)
                     }
                 }),
         )
-        .await?;
+        .await?
+        .into_iter()
+        .all(|b| b)
+        {
+            println!("all packages are up to date");
+        }

        Ok(())
    }
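Note: each per-package check now resolves to Ok(true) when the package is current, so the command prints the summary only when every future returned true. The aggregation shape in isolation, with generic stand-in types:

use futures::future::try_join_all;

// try_join_all short-circuits on the first Err; otherwise fold the bools.
async fn all_up_to_date<F>(checks: Vec<F>) -> anyhow::Result<bool>
where
    F: std::future::Future<Output = anyhow::Result<bool>>,
{
    Ok(try_join_all(checks).await?.into_iter().all(|b| b))
}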
@@ -4,10 +4,13 @@ use async_compression::Level;
 use clap::Args;
 use colored::Colorize;
 use fs_err::tokio as fs;
+#[allow(deprecated)]
 use pesde::{
     manifest::{target::Target, DependencyType},
+    matching_globs_old_behaviour,
     scripts::ScriptName,
     source::{
         git_index::GitBasedSource,
         pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
         specifiers::DependencySpecifiers,
         traits::PackageSource,

@@ -21,7 +24,7 @@ use pesde::{
 };
 use reqwest::{header::AUTHORIZATION, StatusCode};
 use semver::VersionReq;
-use std::path::Component;
+use std::{collections::HashSet, path::PathBuf};
 use tempfile::Builder;
 use tokio::io::{AsyncSeekExt, AsyncWriteExt};

@@ -41,7 +44,12 @@ pub struct PublishCommand {
 }

 impl PublishCommand {
-    async fn run_impl(self, project: &Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
+    async fn run_impl(
+        self,
+        project: &Project,
+        reqwest: reqwest::Client,
+        is_root: bool,
+    ) -> anyhow::Result<()> {
         let mut manifest = project
             .deser_manifest()
             .await

@@ -55,12 +63,17 @@ impl PublishCommand {
         );

         if manifest.private {
-            println!("{}", "package is private, cannot publish".red().bold());
+            if !is_root {
+                println!("{}", "package is private, cannot publish".red().bold());
+            }
+
             return Ok(());
         }

-        if manifest.target.lib_path().is_none() && manifest.target.bin_path().is_none() {
+        if manifest.target.lib_path().is_none()
+            && manifest.target.bin_path().is_none()
+            && manifest.target.scripts().is_none_or(|s| s.is_empty())
+        {
             anyhow::bail!("no exports found in target");
         }

@@ -81,7 +94,7 @@ impl PublishCommand {
             .filter_map(|(_, node)| node.node.direct.as_ref().map(|_| node))
             .any(|node| {
                 node.target.build_files().is_none()
-                    && !matches!(node.node.ty, DependencyType::Dev)
+                    && !matches!(node.node.resolved_ty, DependencyType::Dev)
             })
         {
             anyhow::bail!("roblox packages may not depend on non-roblox packages");

@@ -93,16 +106,21 @@ impl PublishCommand {
             }
         }

+        let canonical_package_dir = project
+            .package_dir()
+            .canonicalize()
+            .context("failed to canonicalize package directory")?;
+
         let mut archive = tokio_tar::Builder::new(
             async_compression::tokio::write::GzipEncoder::with_quality(vec![], Level::Best),
         );

         let mut display_includes: Vec<String> = vec![MANIFEST_FILE_NAME.to_string()];
         let mut display_build_files: Vec<String> = vec![];

-        let (lib_path, bin_path, target_kind) = (
+        let (lib_path, bin_path, scripts, target_kind) = (
             manifest.target.lib_path().cloned(),
             manifest.target.bin_path().cloned(),
+            manifest.target.scripts().cloned(),
             manifest.target.kind(),
         );

@@ -112,75 +130,94 @@ impl PublishCommand {
             _ => None,
         };

-        if manifest.includes.insert(MANIFEST_FILE_NAME.to_string()) {
+        #[allow(deprecated)]
+        let mut paths = matching_globs_old_behaviour(
+            project.package_dir(),
+            manifest.includes.iter().map(|s| s.as_str()),
+            true,
+        )
+        .await
+        .context("failed to get included files")?;
+
+        if paths.insert(PathBuf::from(MANIFEST_FILE_NAME)) {
             println!(
-                "{}: {MANIFEST_FILE_NAME} was not in includes, adding it",
+                "{}: {MANIFEST_FILE_NAME} was not included, adding it",
                 "warn".yellow().bold()
             );
         }

-        if manifest.includes.remove(".git") {
-            println!(
-                "{}: .git was in includes, removing it",
-                "warn".yellow().bold()
-            );
+        if paths.iter().any(|p| p.starts_with(".git")) {
+            anyhow::bail!("git directory was included, please remove it");
         }

-        if !manifest.includes.iter().any(|f| {
+        if !paths.iter().any(|f| {
             matches!(
-                f.to_lowercase().as_str(),
+                f.to_str().unwrap().to_lowercase().as_str(),
                 "readme" | "readme.md" | "readme.txt"
             )
         }) {
             println!(
-                "{}: no README file in includes, consider adding one",
+                "{}: no README file included, consider adding one",
                 "warn".yellow().bold()
             );
         }

-        if !manifest.includes.iter().any(|f| f == "docs") {
+        if !paths.iter().any(|p| p.starts_with("docs")) {
             println!(
-                "{}: no docs directory in includes, consider adding one",
+                "{}: docs directory not included, consider adding one",
                 "warn".yellow().bold()
             );
         }

-        if manifest.includes.remove("default.project.json") {
-            println!(
-                "{}: default.project.json was in includes, this should be generated by the {} script upon dependants installation",
-                "warn".yellow().bold(),
-                ScriptName::RobloxSyncConfigGenerator
-            );
+        for path in &paths {
+            if path
+                .file_name()
+                .is_some_and(|n| n == "default.project.json")
+            {
+                anyhow::bail!(
+                    "default.project.json was included at `{}`, this should be generated by the {} script upon dependants installation",
+                    path.display(),
+                    ScriptName::RobloxSyncConfigGenerator
+                );
+            }
         }

         for ignored_path in IGNORED_FILES.iter().chain(IGNORED_DIRS.iter()) {
-            if manifest.includes.remove(*ignored_path) {
-                println!(
-                    r#"{}: {ignored_path} was in includes, removing it.
-{}: if this was a toolchain manager's manifest file, do not include it due to it possibly messing with user scripts
-{}: otherwise, the file was deemed unnecessary, if you don't understand why, please contact the maintainers"#,
-                    "warn".yellow().bold(),
-                    "info".blue().bold(),
-                    "info".blue().bold()
+            if paths.iter().any(|p| {
+                p.components()
+                    .any(|ct| ct == std::path::Component::Normal(ignored_path.as_ref()))
+            }) {
+                anyhow::bail!(
+                    r#"forbidden file {ignored_path} was included.
+info: if this was a toolchain manager's manifest file, do not include it due to it possibly messing with user scripts
+info: otherwise, the file was deemed unnecessary, if you don't understand why, please contact the maintainers"#,
                 );
             }
         }

         for (name, path) in [("lib path", lib_path), ("bin path", bin_path)] {
-            let Some(export_path) = path else { continue };
+            let Some(relative_export_path) = path else {
+                continue;
+            };

-            let export_path = export_path.to_path(project.package_dir());
-            if !export_path.exists() {
-                anyhow::bail!("{name} points to non-existent file");
-            }
+            let export_path = relative_export_path.to_path(&canonical_package_dir);

-            if !export_path.is_file() {
-                anyhow::bail!("{name} must point to a file");
-            }
+            let contents = match fs::read_to_string(&export_path).await {
+                Ok(contents) => contents,
+                Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
+                    anyhow::bail!("{name} does not exist");
+                }
+                Err(e) if e.kind() == std::io::ErrorKind::IsADirectory => {
+                    anyhow::bail!("{name} must point to a file");
+                }
+                Err(e) => {
+                    return Err(e).context(format!("failed to read {name}"));
+                }
+            };

-            let contents = fs::read_to_string(&export_path)
-                .await
-                .context(format!("failed to read {name}"))?;
+            let export_path = export_path
+                .canonicalize()
+                .context(format!("failed to canonicalize {name}"))?;

             if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
                 errs.into_iter()

@@ -191,71 +228,39 @@ impl PublishCommand {
                 anyhow::bail!("{name} is not a valid Luau file: {err}");
             }

-            let first_part = export_path
-                .strip_prefix(project.package_dir())
-                .context(format!("{name} not within project directory"))?
+            let first_part = relative_export_path
                 .components()
                 .next()
                 .context(format!("{name} must contain at least one part"))?;

             let first_part = match first_part {
-                Component::Normal(part) => part,
+                relative_path::Component::Normal(part) => part,
                 _ => anyhow::bail!("{name} must be within project directory"),
             };

-            let first_part_str = first_part.to_string_lossy();
-
-            if manifest.includes.insert(first_part_str.to_string()) {
+            if paths.insert(
+                export_path
+                    .strip_prefix(&canonical_package_dir)
+                    .unwrap()
+                    .to_path_buf(),
+            ) {
                 println!(
-                    "{}: {name} was not in includes, adding {first_part_str}",
+                    "{}: {name} was not included, adding {relative_export_path}",
                     "warn".yellow().bold()
                 );
             }

-            if roblox_target.as_mut().map_or(false, |build_files| {
-                build_files.insert(first_part_str.to_string())
-            }) {
+            if roblox_target
+                .as_mut()
+                .is_some_and(|build_files| build_files.insert(first_part.to_string()))
+            {
                 println!(
-                    "{}: {name} was not in build files, adding {first_part_str}",
+                    "{}: {name} was not in build files, adding {first_part}",
                     "warn".yellow().bold()
                 );
             }
         }

-        for included_name in &manifest.includes {
-            let included_path = project.package_dir().join(included_name);
-
-            if !included_path.exists() {
-                anyhow::bail!("included file {included_name} does not exist");
-            }
-
-            // it's already included, and guaranteed to be a file
-            if included_name.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
-                continue;
-            }
-
-            if included_path.is_file() {
-                display_includes.push(included_name.clone());
-
-                archive
-                    .append_file(
-                        included_name,
-                        fs::File::open(&included_path)
-                            .await
-                            .context(format!("failed to read {included_name}"))?
-                            .file_mut(),
-                    )
-                    .await?;
-            } else {
-                display_includes.push(format!("{included_name}/*"));
-
-                archive
-                    .append_dir_all(included_name, &included_path)
-                    .await
-                    .context(format!("failed to include directory {included_name}"))?;
-            }
-        }
-
         if let Some(build_files) = &roblox_target {
             for build_file in build_files.iter() {
                 if build_file.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {

@@ -273,8 +278,8 @@ impl PublishCommand {
                     anyhow::bail!("build file {build_file} does not exist");
                 }

-                if !manifest.includes.contains(build_file) {
|
||||
anyhow::bail!("build file {build_file} is not in includes, please add it");
|
||||
if !paths.iter().any(|p| p.starts_with(build_file)) {
|
||||
anyhow::bail!("build file {build_file} is not included, please add it");
|
||||
}
|
||||
|
||||
if build_file_path.is_file() {
|
||||
|
@ -285,9 +290,80 @@ impl PublishCommand {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "wally-compat")]
|
||||
let mut has_wally = false;
|
||||
let mut has_git = false;
|
||||
if let Some(scripts) = scripts {
|
||||
for (name, path) in scripts {
|
||||
let script_path = path.to_path(&canonical_package_dir);
|
||||
|
||||
let contents = match fs::read_to_string(&script_path).await {
|
||||
Ok(contents) => contents,
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||
anyhow::bail!("script {name} does not exist");
|
||||
}
|
||||
Err(e) if e.kind() == std::io::ErrorKind::IsADirectory => {
|
||||
anyhow::bail!("script {name} must point to a file");
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(e).context(format!("failed to read script {name}"));
|
||||
}
|
||||
};
|
||||
|
||||
let script_path = script_path
|
||||
.canonicalize()
|
||||
.context(format!("failed to canonicalize script {name}"))?;
|
||||
|
||||
if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
|
||||
errs.into_iter()
|
||||
.map(|err| err.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
}) {
|
||||
anyhow::bail!("script {name} is not a valid Luau file: {err}");
|
||||
}
|
||||
|
||||
if paths.insert(
|
||||
script_path
|
||||
.strip_prefix(&canonical_package_dir)
|
||||
.unwrap()
|
||||
.to_path_buf(),
|
||||
) {
|
||||
println!(
|
||||
"{}: script {name} was not included, adding {path}",
|
||||
"warn".yellow().bold()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for relative_path in &paths {
|
||||
let path = project.package_dir().join(relative_path);
|
||||
|
||||
if !path.exists() {
|
||||
anyhow::bail!("included file `{}` does not exist", path.display());
|
||||
}
|
||||
|
||||
let file_name = relative_path
|
||||
.file_name()
|
||||
.context("failed to get file name")?
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
|
||||
// it'll be included later after transformations, and is guaranteed to be a file
|
||||
if file_name.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if path.is_file() {
|
||||
archive
|
||||
.append_file(
|
||||
&relative_path,
|
||||
fs::File::open(&path)
|
||||
.await
|
||||
.context(format!("failed to read `{}`", relative_path.display()))?
|
||||
.file_mut(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
for specifier in manifest
|
||||
.dependencies
|
||||
|
@ -312,8 +388,6 @@ impl PublishCommand {
|
|||
}
|
||||
#[cfg(feature = "wally-compat")]
|
||||
DependencySpecifiers::Wally(specifier) => {
|
||||
has_wally = true;
|
||||
|
||||
let index_name = specifier
|
||||
.index
|
||||
.as_deref()
|
||||
|
@ -329,12 +403,10 @@ impl PublishCommand {
|
|||
.to_string(),
|
||||
);
|
||||
}
|
||||
DependencySpecifiers::Git(_) => {
|
||||
has_git = true;
|
||||
}
|
||||
DependencySpecifiers::Git(_) => {}
|
||||
DependencySpecifiers::Workspace(spec) => {
|
||||
let pkg_ref = WorkspacePackageSource
|
||||
.resolve(spec, project, target_kind)
|
||||
.resolve(spec, project, target_kind, &mut HashSet::new())
|
||||
.await
|
||||
.context("failed to resolve workspace package")?
|
||||
.1
|
||||
|
@ -429,11 +501,25 @@ impl PublishCommand {
|
|||
.bin_path()
|
||||
.map_or("(none)".to_string(), |p| p.to_string())
|
||||
);
|
||||
println!(
|
||||
"\tscripts: {}",
|
||||
manifest
|
||||
.target
|
||||
.scripts()
|
||||
.filter(|s| !s.is_empty())
|
||||
.map_or("(none)".to_string(), |s| {
|
||||
s.keys().cloned().collect::<Vec<_>>().join(", ")
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
println!(
|
||||
"includes: {}",
|
||||
display_includes.into_iter().collect::<Vec<_>>().join(", ")
|
||||
paths
|
||||
.into_iter()
|
||||
.map(|p| p.to_string_lossy().to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
);
|
||||
|
||||
if !self.dry_run
|
||||
|
@ -489,8 +575,7 @@ impl PublishCommand {
|
|||
.get(&self.index)
|
||||
.context(format!("missing index {}", self.index))?;
|
||||
let source = PesdePackageSource::new(index_url.clone());
|
||||
source
|
||||
.refresh(project)
|
||||
PackageSource::refresh(&source, project)
|
||||
.await
|
||||
.context("failed to refresh source")?;
|
||||
let config = source
|
||||
|
@ -506,15 +591,23 @@ impl PublishCommand {
|
|||
);
|
||||
}
|
||||
|
||||
manifest.all_dependencies().context("dependency conflict")?;
|
||||
let deps = manifest.all_dependencies().context("dependency conflict")?;
|
||||
|
||||
if !config.git_allowed && has_git {
|
||||
anyhow::bail!("git dependencies are not allowed on this index");
|
||||
}
|
||||
|
||||
#[cfg(feature = "wally-compat")]
|
||||
if !config.wally_allowed && has_wally {
|
||||
anyhow::bail!("wally dependencies are not allowed on this index");
|
||||
if let Some((disallowed, _)) = deps.iter().find(|(_, (spec, _))| match spec {
|
||||
DependencySpecifiers::Pesde(spec) => {
|
||||
!config.other_registries_allowed.is_allowed_or_same(
|
||||
source.repo_url().clone(),
|
||||
gix::Url::try_from(spec.index.as_deref().unwrap()).unwrap(),
|
||||
)
|
||||
}
|
||||
DependencySpecifiers::Git(spec) => !config.git_allowed.is_allowed(spec.repo.clone()),
|
||||
#[cfg(feature = "wally-compat")]
|
||||
DependencySpecifiers::Wally(spec) => !config
|
||||
.wally_allowed
|
||||
.is_allowed(gix::Url::try_from(spec.index.as_deref().unwrap()).unwrap()),
|
||||
_ => false,
|
||||
}) {
|
||||
anyhow::bail!("dependency `{disallowed}` is not allowed on this index");
|
||||
}
|
||||
|
||||
if self.dry_run {
|
||||
|
@ -533,7 +626,7 @@ impl PublishCommand {
|
|||
.body(archive);
|
||||
|
||||
if let Some(token) = project.auth_config().tokens().get(index_url) {
|
||||
log::debug!("using token for {index_url}");
|
||||
tracing::debug!("using token for {index_url}");
|
||||
request = request.header(AUTHORIZATION, token);
|
||||
}
|
||||
|
||||
|
@ -569,7 +662,7 @@ impl PublishCommand {
|
|||
}
|
||||
|
||||
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||
let result = self.clone().run_impl(&project, reqwest.clone()).await;
|
||||
let result = self.clone().run_impl(&project, reqwest.clone(), true).await;
|
||||
if project.workspace_dir().is_some() {
|
||||
return result;
|
||||
} else {
|
||||
|
@ -579,7 +672,7 @@ impl PublishCommand {
|
|||
run_on_workspace_members(&project, |project| {
|
||||
let reqwest = reqwest.clone();
|
||||
let this = self.clone();
|
||||
async move { this.run_impl(&project, reqwest).await }
|
||||
async move { this.run_impl(&project, reqwest, false).await }
|
||||
})
|
||||
.await
|
||||
.map(|_| ())
|
||||
|
|
|
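Worth noting about the validation above: publishing now checks a flat set of glob-matched relative paths instead of the manifest's raw `includes` strings. A minimal, self-contained sketch of the README check in that style (the helper name and `main` are illustrative, not pesde's API):

    use std::collections::HashSet;
    use std::path::PathBuf;

    // Returns true if any included path is a README at the package root,
    // mirroring the case-insensitive match in the diff above.
    fn has_readme(paths: &HashSet<PathBuf>) -> bool {
        paths.iter().any(|p| {
            p.to_str()
                .map(|s| matches!(s.to_lowercase().as_str(), "readme" | "readme.md" | "readme.txt"))
                .unwrap_or(false)
        })
    }

    fn main() {
        let mut paths = HashSet::new();
        paths.insert(PathBuf::from("README.md"));
        assert!(has_readme(&paths));
    }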
@@ -1,13 +1,17 @@
-use crate::cli::{repos::update_scripts, up_to_date_lockfile};
+use crate::cli::up_to_date_lockfile;
use anyhow::Context;
use clap::Args;
use futures::{StreamExt, TryStreamExt};
use pesde::{
    linking::generator::generate_bin_linking_module,
    names::{PackageName, PackageNames},
-    Project, PACKAGES_CONTAINER_NAME,
+    Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
};
use relative_path::RelativePathBuf;
-use std::{env::current_dir, ffi::OsString, io::Write, path::PathBuf, process::Command};
+use std::{
+    collections::HashSet, env::current_dir, ffi::OsString, io::Write, path::PathBuf,
+    process::Command,
+};

#[derive(Debug, Args)]
pub struct RunCommand {

@@ -22,41 +26,38 @@ pub struct RunCommand {

impl RunCommand {
    pub async fn run(self, project: Project) -> anyhow::Result<()> {
-        let run = |path: PathBuf| {
-            let package_dir = project.package_dir().to_path_buf();
-            let fut = update_scripts(&project);
-            async move {
-                fut.await.expect("failed to update scripts");
-
-                let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
-                caller
-                    .write_all(
-                        generate_bin_linking_module(
-                            package_dir,
-                            &format!("{:?}", path.to_string_lossy()),
-                        )
-                        .as_bytes(),
-                    )
-                    .expect("failed to write to tempfile");
+        let run = |root: PathBuf, file_path: PathBuf| {
+            let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
+            caller
+                .write_all(
+                    generate_bin_linking_module(
+                        root,
+                        &format!("{:?}", file_path.to_string_lossy()),
+                    )
+                    .as_bytes(),
+                )
+                .expect("failed to write to tempfile");

-                let status = Command::new("lune")
-                    .arg("run")
-                    .arg(caller.path())
-                    .arg("--")
-                    .args(&self.args)
-                    .current_dir(current_dir().expect("failed to get current directory"))
-                    .status()
-                    .expect("failed to run script");
+            let status = Command::new("lune")
+                .arg("run")
+                .arg(caller.path())
+                .arg("--")
+                .args(&self.args)
+                .current_dir(current_dir().expect("failed to get current directory"))
+                .status()
+                .expect("failed to run script");

-                drop(caller);
+            drop(caller);

-                std::process::exit(status.code().unwrap_or(1))
-            }
+            std::process::exit(status.code().unwrap_or(1))
        };

        let Some(package_or_script) = self.package_or_script else {
            if let Some(script_path) = project.deser_manifest().await?.target.bin_path() {
-                run(script_path.to_path(project.package_dir())).await;
+                run(
+                    project.package_dir().to_owned(),
+                    script_path.to_path(project.package_dir()),
+                );
                return Ok(());
            }

@@ -96,14 +97,19 @@ impl RunCommand {
                    version_id.version(),
                );

-                run(bin_path.to_path(&container_folder)).await;
+                let path = bin_path.to_path(&container_folder);
+
+                run(path.clone(), path);
                return Ok(());
            }
        }

        if let Ok(manifest) = project.deser_manifest().await {
            if let Some(script_path) = manifest.scripts.get(&package_or_script) {
-                run(script_path.to_path(project.package_dir())).await;
+                run(
+                    project.package_dir().to_path_buf(),
+                    script_path.to_path(project.package_dir()),
+                );
                return Ok(());
            }
        };

@@ -115,7 +121,56 @@ impl RunCommand {
            anyhow::bail!("path `{}` does not exist", path.display());
        }

-        run(path).await;
+        let workspace_dir = project
+            .workspace_dir()
+            .unwrap_or_else(|| project.package_dir());
+
+        let members = match project.workspace_members(workspace_dir, false).await {
+            Ok(members) => members.boxed(),
+            Err(pesde::errors::WorkspaceMembersError::ManifestMissing(e))
+                if e.kind() == std::io::ErrorKind::NotFound =>
+            {
+                futures::stream::empty().boxed()
+            }
+            Err(e) => Err(e).context("failed to get workspace members")?,
+        };
+
+        let members = members
+            .map(|res| {
+                res.map_err(anyhow::Error::from)
+                    .and_then(|(path, _)| path.canonicalize().map_err(Into::into))
+            })
+            .chain(futures::stream::once(async {
+                workspace_dir.canonicalize().map_err(Into::into)
+            }))
+            .try_collect::<HashSet<_>>()
+            .await
+            .context("failed to collect workspace members")?;
+
+        let root = 'finder: {
+            let mut current_path = path.to_path_buf();
+            loop {
+                let canonical_path = current_path
+                    .canonicalize()
+                    .context("failed to canonicalize parent")?;
+
+                if members.contains(&canonical_path)
+                    && canonical_path.join(MANIFEST_FILE_NAME).exists()
+                {
+                    break 'finder canonical_path;
+                }
+
+                if let Some(parent) = current_path.parent() {
+                    current_path = parent.to_path_buf();
+                } else {
+                    break;
+                }
+            }
+
+            project.package_dir().to_path_buf()
+        };
+
+        run(root, path);

        Ok(())
    }
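The new `run` resolves which directory should serve as the require root by walking up from the script's path until it hits a directory that is a known workspace member and contains a manifest, falling back to the package directory otherwise. A standalone sketch of that loop, assuming a precomputed set of canonicalized member directories (`members`, `MANIFEST_FILE_NAME`, and the function name stand in for the real values):

    use std::collections::HashSet;
    use std::path::{Path, PathBuf};

    const MANIFEST_FILE_NAME: &str = "pesde.toml";

    // Walk from `start` upward; the first ancestor that is a workspace member
    // and holds a manifest becomes the root. Fall back to `default_root`.
    fn find_root(start: &Path, members: &HashSet<PathBuf>, default_root: &Path) -> PathBuf {
        let mut current = start.to_path_buf();
        loop {
            if let Ok(canonical) = current.canonicalize() {
                if members.contains(&canonical) && canonical.join(MANIFEST_FILE_NAME).exists() {
                    return canonical;
                }
            }
            match current.parent() {
                Some(parent) => current = parent.to_path_buf(),
                None => return default_root.to_path_buf(),
            }
        }
    }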
@@ -1,7 +1,8 @@
use crate::cli::{
    config::read_config,
    version::{
-        current_version, get_latest_remote_version, get_or_download_version, update_bin_exe,
+        current_version, get_or_download_version, get_remote_version, no_build_metadata,
+        update_bin_exe, TagInfo, VersionType,
    },
};
use anyhow::Context;

@@ -24,33 +25,33 @@ impl SelfUpgradeCommand {
            .context("no cached version found")?
            .1
        } else {
-            get_latest_remote_version(&reqwest).await?
+            get_remote_version(&reqwest, VersionType::Latest).await?
        };

-        if latest_version <= current_version() {
+        let latest_version_no_metadata = no_build_metadata(&latest_version);
+
+        if latest_version_no_metadata <= current_version() {
            println!("already up to date");
            return Ok(());
        }

+        let display_latest_version = latest_version_no_metadata.to_string().yellow().bold();
+
        if !inquire::prompt_confirmation(format!(
-            "are you sure you want to upgrade {} from {} to {}?",
+            "are you sure you want to upgrade {} from {} to {display_latest_version}?",
            env!("CARGO_BIN_NAME").cyan(),
-            current_version().to_string().yellow().bold(),
-            latest_version.to_string().yellow().bold()
+            env!("CARGO_PKG_VERSION").yellow().bold()
        ))? {
            println!("cancelled upgrade");
            return Ok(());
        }

-        let path = get_or_download_version(&reqwest, &latest_version, true)
+        let path = get_or_download_version(&reqwest, &TagInfo::Complete(latest_version), true)
            .await?
            .unwrap();
        update_bin_exe(&path).await?;

-        println!(
-            "upgraded to version {}!",
-            latest_version.to_string().yellow().bold()
-        );
+        println!("upgraded to version {display_latest_version}!");

        Ok(())
    }
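Why the build metadata is stripped before comparing: releases now carry versions like `0.5.0+rev.g32906400`, and the `semver` crate's total ordering treats the `+...` suffix as significant, so an identical release could otherwise look different from the running binary. A small sketch mirroring `no_build_metadata` from the diff:

    use semver::{BuildMetadata, Version};

    // Drop the +build suffix so 0.5.0+rev.gabc compares equal to 0.5.0.
    fn no_build_metadata(version: &Version) -> Version {
        let mut version = version.clone();
        version.build = BuildMetadata::EMPTY;
        version
    }

    fn main() {
        let remote = Version::parse("0.5.0+rev.g32906400").unwrap();
        let local = Version::parse("0.5.0").unwrap();
        assert_ne!(remote, local); // the +rev suffix makes them unequal as-is
        assert_eq!(no_build_metadata(&remote), local); // equal once metadata is stripped
    }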
@@ -1,21 +1,16 @@
-use crate::cli::{progress_bar, repos::update_scripts, run_on_workspace_members};
+use crate::cli::{progress_bar, run_on_workspace_members};
use anyhow::Context;
use clap::Args;
use colored::Colorize;
-use indicatif::MultiProgress;
use pesde::{lockfile::Lockfile, Project};
use std::{collections::HashSet, sync::Arc};
use tokio::sync::Mutex;

#[derive(Debug, Args, Copy, Clone)]
pub struct UpdateCommand {}

impl UpdateCommand {
-    pub async fn run(
-        self,
-        project: Project,
-        multi: MultiProgress,
-        reqwest: reqwest::Client,
-    ) -> anyhow::Result<()> {
+    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
        let mut refreshed_sources = HashSet::new();

        let manifest = project

@@ -31,11 +26,10 @@ impl UpdateCommand {
        );

        let graph = project
-            .dependency_graph(None, &mut refreshed_sources)
+            .dependency_graph(None, &mut refreshed_sources, false)
            .await
            .context("failed to build dependency graph")?;

-        update_scripts(&project).await?;
+        let graph = Arc::new(graph);

        project
            .write_lockfile(Lockfile {

@@ -46,30 +40,34 @@ impl UpdateCommand {

                graph: {
                    let (rx, downloaded_graph) = project
-                        .download_graph(&graph, &mut refreshed_sources, &reqwest, false, false)
+                        .download_and_link(
+                            &graph,
+                            &Arc::new(Mutex::new(refreshed_sources)),
+                            &reqwest,
+                            false,
+                            false,
+                            |_| async { Ok::<_, std::io::Error>(()) },
+                        )
                        .await
                        .context("failed to download dependencies")?;

                    progress_bar(
                        graph.values().map(|versions| versions.len() as u64).sum(),
                        rx,
-                        &multi,
                        "📥 ".to_string(),
                        "downloading dependencies".to_string(),
                        "downloaded dependencies".to_string(),
                    )
                    .await?;

-                    Arc::into_inner(downloaded_graph)
-                        .unwrap()
-                        .into_inner()
-                        .unwrap()
+                    downloaded_graph
+                        .await
+                        .context("failed to download dependencies")?
                },

                workspace: run_on_workspace_members(&project, |project| {
-                    let multi = multi.clone();
                    let reqwest = reqwest.clone();
-                    async move { Box::pin(self.run(project, multi, reqwest)).await }
+                    async move { Box::pin(self.run(project, reqwest)).await }
                })
                .await?,
            })
@@ -2,19 +2,16 @@ use crate::cli::{auth::Tokens, home_dir};
use anyhow::Context;
use fs_err::tokio as fs;
use serde::{Deserialize, Serialize};
+use tracing::instrument;

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct CliConfig {
    #[serde(
        serialize_with = "crate::util::serialize_gix_url",
        deserialize_with = "crate::util::deserialize_gix_url"
    )]
    pub default_index: gix::Url,
-    #[serde(
-        serialize_with = "crate::util::serialize_gix_url",
-        deserialize_with = "crate::util::deserialize_gix_url"
-    )]
-    pub scripts_repo: gix::Url,

    pub tokens: Tokens,

@@ -25,12 +22,7 @@ pub struct CliConfig {
impl Default for CliConfig {
    fn default() -> Self {
        Self {
-            default_index: "https://github.com/daimond113/pesde-index"
-                .try_into()
-                .unwrap(),
-            scripts_repo: "https://github.com/daimond113/pesde-scripts"
-                .try_into()
-                .unwrap(),
+            default_index: "https://github.com/pesde-pkg/index".try_into().unwrap(),

            tokens: Tokens(Default::default()),

@@ -39,6 +31,7 @@ impl Default for CliConfig {
    }
}

+#[instrument(level = "trace")]
pub async fn read_config() -> anyhow::Result<CliConfig> {
    let config_string = match fs::read_to_string(home_dir()?.join("config.toml")).await {
        Ok(config_string) => config_string,

@@ -53,6 +46,7 @@ pub async fn read_config() -> anyhow::Result<CliConfig> {
    Ok(config)
}

+#[instrument(level = "trace")]
pub async fn write_config(config: &CliConfig) -> anyhow::Result<()> {
    let config_string = toml::to_string(config).context("failed to serialize config")?;
    fs::write(home_dir()?.join("config.toml"), config_string)
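Dropping `scripts_repo` stays backward compatible because of how serde handles the struct: unknown keys in an existing `config.toml` are ignored, and keys missing from the file fall back to `Default` via `#[serde(default)]`. A reduced sketch of that behavior (`MiniConfig` is an illustrative stand-in, not the real `CliConfig`):

    use serde::{Deserialize, Serialize};

    // Unlisted TOML keys are ignored; missing keys fall back to Default.
    #[derive(Debug, Serialize, Deserialize)]
    #[serde(default)]
    struct MiniConfig {
        default_index: String,
    }

    impl Default for MiniConfig {
        fn default() -> Self {
            Self {
                default_index: "https://github.com/pesde-pkg/index".into(),
            }
        }
    }

    fn main() {
        // an old config that still has the removed scripts_repo key
        let old = r#"scripts_repo = "https://example.com/scripts""#;
        let cfg: MiniConfig = toml::from_str(old).unwrap();
        assert_eq!(cfg.default_index, "https://github.com/pesde-pkg/index");
    }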
@@ -2,7 +2,6 @@ use anyhow::Context;
use colored::Colorize;
use fs_err::tokio as fs;
use futures::StreamExt;
-use indicatif::MultiProgress;
use pesde::{
    lockfile::Lockfile,
    manifest::target::TargetKind,

@@ -19,12 +18,12 @@ use std::{
    time::Duration,
};
use tokio::pin;
+use tracing::instrument;

pub mod auth;
pub mod commands;
pub mod config;
pub mod files;
-pub mod repos;
#[cfg(feature = "version-management")]
pub mod version;

@@ -44,6 +43,7 @@ pub async fn bin_dir() -> anyhow::Result<PathBuf> {
    Ok(bin_dir)
}

+#[instrument(skip(project), ret(level = "trace"), level = "debug")]
pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
    let manifest = project.deser_manifest().await?;
    let lockfile = match project.deser_lockfile().await {

@@ -57,17 +57,17 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
    };

    if manifest.overrides != lockfile.overrides {
-        log::debug!("overrides are different");
+        tracing::debug!("overrides are different");
        return Ok(None);
    }

    if manifest.target.kind() != lockfile.target {
-        log::debug!("target kind is different");
+        tracing::debug!("target kind is different");
        return Ok(None);
    }

    if manifest.name != lockfile.name || manifest.version != lockfile.version {
-        log::debug!("name or version is different");
+        tracing::debug!("name or version is different");
        return Ok(None);
    }

@@ -79,7 +79,7 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
            node.node
                .direct
                .as_ref()
-                .map(|(_, spec)| (spec, node.node.ty))
+                .map(|(_, spec, source_ty)| (spec, source_ty))
        })
        .collect::<HashSet<_>>();

@@ -87,9 +87,9 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
        .all_dependencies()
        .context("failed to get all dependencies")?
        .iter()
-        .all(|(_, (spec, ty))| specs.contains(&(spec, *ty)));
+        .all(|(_, (spec, ty))| specs.contains(&(spec, ty)));

-    log::debug!("dependencies are the same: {same_dependencies}");
+    tracing::debug!("dependencies are the same: {same_dependencies}");

    Ok(if same_dependencies {
        Some(lockfile)

@@ -134,7 +134,7 @@ impl VersionedPackageName {
        let versions = graph.get(&self.0).context("package not found in graph")?;
        if versions.len() == 1 {
            let version = versions.keys().next().unwrap().clone();
-            log::debug!("only one version found, using {version}");
+            tracing::debug!("only one version found, using {version}");
            version
        } else {
            anyhow::bail!(

@@ -196,21 +196,18 @@ pub fn parse_gix_url(s: &str) -> Result<gix::Url, gix::url::parse::Error> {
pub async fn progress_bar<E: std::error::Error + Into<anyhow::Error>>(
    len: u64,
    mut rx: tokio::sync::mpsc::Receiver<Result<String, E>>,
-    multi: &MultiProgress,
    prefix: String,
    progress_msg: String,
    finish_msg: String,
) -> anyhow::Result<()> {
-    let bar = multi.add(
-        indicatif::ProgressBar::new(len)
-            .with_style(
-                indicatif::ProgressStyle::default_bar()
-                    .template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
-                    .progress_chars("█▓▒░ "),
-            )
-            .with_prefix(prefix)
-            .with_message(progress_msg),
-    );
+    let bar = indicatif::ProgressBar::new(len)
+        .with_style(
+            indicatif::ProgressStyle::default_bar()
+                .template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
+                .progress_chars("█▓▒░ "),
+        )
+        .with_prefix(prefix)
+        .with_message(progress_msg);
    bar.enable_steady_tick(Duration::from_millis(100));

    while let Some(result) = rx.recv().await {

@@ -249,7 +246,9 @@ pub async fn run_on_workspace_members<F: Future<Output = anyhow::Result<()>>>(
        return Ok(Default::default());
    }

-    let members_future = project.workspace_members(project.package_dir()).await?;
+    let members_future = project
+        .workspace_members(project.package_dir(), true)
+        .await?;
    pin!(members_future);

    let mut results = BTreeMap::<PackageName, BTreeMap<TargetKind, RelativePathBuf>>::new();

@@ -258,7 +257,10 @@ pub async fn run_on_workspace_members<F: Future<Output = anyhow::Result<()>>>(
        let relative_path =
            RelativePathBuf::from_path(path.strip_prefix(project.package_dir()).unwrap()).unwrap();

-        f(shift_project_dir(project, path)).await?;
+        // don't run on the current workspace root
+        if relative_path != "" {
+            f(shift_project_dir(project, path)).await?;
+        }

        results
            .entry(manifest.name)
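With the `MultiProgress` wrapper removed, the bar is constructed directly; the pattern is plain indicatif 0.17. A minimal, self-contained sketch of the same setup (the receiver loop is replaced by a plain counter here for illustration):

    use std::time::Duration;

    fn main() -> Result<(), indicatif::style::TemplateError> {
        let bar = indicatif::ProgressBar::new(10)
            .with_style(
                indicatif::ProgressStyle::default_bar()
                    // same template string as the diff above
                    .template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
                    .progress_chars("█▓▒░ "),
            )
            .with_prefix("📥 ")
            .with_message("downloading dependencies");
        bar.enable_steady_tick(Duration::from_millis(100));

        for _ in 0..10 {
            bar.inc(1); // one tick per completed item, as the rx loop does
        }
        bar.finish_with_message("downloaded dependencies");
        Ok(())
    }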
src/cli/repos.rs (deleted)

@@ -1,143 +0,0 @@
use crate::{
    cli::{config::read_config, home_dir},
    util::authenticate_conn,
};
use anyhow::Context;
use fs_err::tokio as fs;
use gix::remote::{fetch::Shallow, Direction};
use pesde::Project;
use std::{path::Path, sync::atomic::AtomicBool};
use tokio::{runtime::Handle, task::spawn_blocking};

async fn update_repo<P: AsRef<Path>>(
    name: &str,
    path: P,
    url: gix::Url,
    project: &Project,
) -> anyhow::Result<()> {
    let path = path.as_ref();
    let should_update = path.exists();

    let (repo, oid) = if should_update {
        let repo = gix::open(path).context(format!("failed to open {name} repository"))?;

        let remote = repo
            .find_default_remote(Direction::Fetch)
            .context(format!("missing default remote of {name} repository"))?
            .context(format!(
                "failed to find default remote of {name} repository"
            ))?;

        let mut connection = remote.connect(Direction::Fetch).context(format!(
            "failed to connect to default remote of {name} repository"
        ))?;

        authenticate_conn(&mut connection, project.auth_config());

        let results = connection
            .prepare_fetch(gix::progress::Discard, Default::default())
            .context(format!("failed to prepare {name} repository fetch"))?
            .with_shallow(Shallow::Deepen(1))
            .receive(gix::progress::Discard, &false.into())
            .context(format!("failed to receive new {name} repository contents"))?;

        let remote_ref = results
            .ref_map
            .remote_refs
            .first()
            .context(format!("failed to get remote refs of {name} repository"))?;

        let unpacked = remote_ref.unpack();
        let oid = unpacked
            .1
            .or(unpacked.2)
            .context("couldn't find oid in remote ref")?;

        (repo, gix::ObjectId::from(oid))
    } else {
        fs::create_dir_all(path)
            .await
            .context(format!("failed to create {name} directory"))?;

        let repo = gix::prepare_clone(url, path)
            .context(format!("failed to prepare {name} repository clone"))?
            .with_shallow(Shallow::Deepen(1))
            .fetch_only(gix::progress::Discard, &false.into())
            .context(format!("failed to fetch and checkout {name} repository"))?
            .0;

        let oid = {
            let mut head = repo
                .head()
                .context(format!("failed to get {name} repository head"))?;
            let obj = head
                .peel_to_object_in_place()
                .context(format!("failed to peel {name} repository head to object"))?;

            obj.id
        };

        (repo, oid)
    };

    let tree = repo
        .find_object(oid)
        .context(format!("failed to find {name} repository tree"))?
        .peel_to_tree()
        .context(format!("failed to peel {name} repository object to tree"))?;

    let mut index = gix::index::File::from_state(
        gix::index::State::from_tree(&tree.id, &repo.objects, Default::default()).context(
            format!("failed to create index state from {name} repository tree"),
        )?,
        repo.index_path(),
    );

    let opts = gix::worktree::state::checkout::Options {
        overwrite_existing: true,
        destination_is_initially_empty: !should_update,
        ..Default::default()
    };

    gix::worktree::state::checkout(
        &mut index,
        repo.work_dir().context(format!("{name} repo is bare"))?,
        repo.objects
            .clone()
            .into_arc()
            .context("failed to clone objects")?,
        &gix::progress::Discard,
        &gix::progress::Discard,
        &false.into(),
        opts,
    )
    .context(format!("failed to checkout {name} repository"))?;

    index
        .write(gix::index::write::Options::default())
        .context("failed to write index")
}

static SCRIPTS_UPDATED: AtomicBool = AtomicBool::new(false);

pub async fn update_scripts(project: &Project) -> anyhow::Result<()> {
    if SCRIPTS_UPDATED.swap(true, std::sync::atomic::Ordering::Relaxed) {
        return Ok(());
    }

    let home_dir = home_dir()?;
    let config = read_config().await?;

    let project = project.clone();
    spawn_blocking(move || {
        Handle::current().block_on(update_repo(
            "scripts",
            home_dir.join("scripts"),
            config.scripts_repo,
            &project,
        ))
    })
    .await??;

    Ok(())
}
@@ -15,7 +15,8 @@ use std::{
    env::current_exe,
    path::{Path, PathBuf},
};
-use tokio::io::AsyncReadExt;
+use tokio::io::AsyncWrite;
+use tracing::instrument;

pub fn current_version() -> Version {
    Version::parse(env!("CARGO_PKG_VERSION")).unwrap()

@@ -33,18 +34,33 @@ struct Asset {
    url: url::Url,
}

+#[instrument(level = "trace")]
fn get_repo() -> (String, String) {
    let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
-    (
+    let (owner, repo) = (
        parts.next().unwrap().to_string(),
        parts.next().unwrap().to_string(),
-    )
+    );
+
+    tracing::trace!("repository for updates: {owner}/{repo}");
+
+    (owner, repo)
}

-pub async fn get_latest_remote_version(reqwest: &reqwest::Client) -> anyhow::Result<Version> {
+#[derive(Debug)]
+pub enum VersionType {
+    Latest,
+    Specific(Version),
+}
+
+#[instrument(skip(reqwest), level = "trace")]
+pub async fn get_remote_version(
+    reqwest: &reqwest::Client,
+    ty: VersionType,
+) -> anyhow::Result<Version> {
    let (owner, repo) = get_repo();

-    let releases = reqwest
+    let mut releases = reqwest
        .get(format!(
            "https://api.github.com/repos/{owner}/{repo}/releases",
        ))

@@ -55,17 +71,28 @@ pub async fn get_remote_version(
        .context("failed to get GitHub API response")?
        .json::<Vec<Release>>()
        .await
-        .context("failed to parse GitHub API response")?;
-
-    releases
+        .context("failed to parse GitHub API response")?
        .into_iter()
-        .map(|release| Version::parse(release.tag_name.trim_start_matches('v')).unwrap())
-        .max()
-        .context("failed to find latest version")
+        .filter_map(|release| Version::parse(release.tag_name.trim_start_matches('v')).ok());
+
+    match ty {
+        VersionType::Latest => releases.max(),
+        VersionType::Specific(version) => {
+            releases.find(|v| no_build_metadata(v) == no_build_metadata(&version))
+        }
+    }
+    .context("failed to find latest version")
}

+pub fn no_build_metadata(version: &Version) -> Version {
+    let mut version = version.clone();
+    version.build = semver::BuildMetadata::EMPTY;
+    version
+}
+
const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);

+#[instrument(skip(reqwest), level = "trace")]
pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
    let config = read_config().await?;

@@ -73,9 +100,11 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
        .last_checked_updates
        .filter(|(time, _)| chrono::Utc::now() - *time < CHECK_INTERVAL)
    {
+        tracing::debug!("using cached version");
        version
    } else {
-        let version = get_latest_remote_version(reqwest).await?;
+        tracing::debug!("checking for updates");
+        let version = get_remote_version(reqwest, VersionType::Latest).await?;

        write_config(&CliConfig {
            last_checked_updates: Some((chrono::Utc::now(), version.clone())),

@@ -86,72 +115,77 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
        version
    };
    let current_version = current_version();
+    let version_no_metadata = no_build_metadata(&version);

-    if version > current_version {
-        let name = env!("CARGO_BIN_NAME");
-        let changelog = format!("{}/releases/tag/v{version}", env!("CARGO_PKG_REPOSITORY"),);
-
-        let unformatted_messages = [
-            "".to_string(),
-            format!("update available! {current_version} → {version}"),
-            format!("changelog: {changelog}"),
-            format!("run `{name} self-upgrade` to upgrade"),
-            "".to_string(),
-        ];
-
-        let width = unformatted_messages
-            .iter()
-            .map(|s| s.chars().count())
-            .max()
-            .unwrap()
-            + 4;
-
-        let column = "│".bright_magenta();
-
-        let message = [
-            "".to_string(),
-            format!(
-                "update available! {} → {}",
-                current_version.to_string().red(),
-                version.to_string().green()
-            ),
-            format!("changelog: {}", changelog.blue()),
-            format!(
-                "run `{} {}` to upgrade",
-                name.blue(),
-                "self-upgrade".yellow()
-            ),
-            "".to_string(),
-        ]
-        .into_iter()
-        .enumerate()
-        .map(|(i, s)| {
-            let text_length = unformatted_messages[i].chars().count();
-            let padding = (width as f32 - text_length as f32) / 2f32;
-            let padding_l = " ".repeat(padding.floor() as usize);
-            let padding_r = " ".repeat(padding.ceil() as usize);
-            format!("{column}{padding_l}{s}{padding_r}{column}")
-        })
-        .collect::<Vec<_>>()
-        .join("\n");
-
-        let lines = "─".repeat(width).bright_magenta();
-
-        let tl = "╭".bright_magenta();
-        let tr = "╮".bright_magenta();
-        let bl = "╰".bright_magenta();
-        let br = "╯".bright_magenta();
-
-        println!("\n{tl}{lines}{tr}\n{message}\n{bl}{lines}{br}\n");
+    if version_no_metadata <= current_version {
+        return Ok(());
    }
+
+    let name = env!("CARGO_BIN_NAME");
+    let changelog = format!("{}/releases/tag/v{version}", env!("CARGO_PKG_REPOSITORY"));
+
+    let unformatted_messages = [
+        "".to_string(),
+        format!("update available! {current_version} → {version_no_metadata}"),
+        format!("changelog: {changelog}"),
+        format!("run `{name} self-upgrade` to upgrade"),
+        "".to_string(),
+    ];
+
+    let width = unformatted_messages
+        .iter()
+        .map(|s| s.chars().count())
+        .max()
+        .unwrap()
+        + 4;
+
+    let column = "│".bright_magenta();
+
+    let message = [
+        "".to_string(),
+        format!(
+            "update available! {} → {}",
+            current_version.to_string().red(),
+            version_no_metadata.to_string().green()
+        ),
+        format!("changelog: {}", changelog.blue()),
+        format!(
+            "run `{} {}` to upgrade",
+            name.blue(),
+            "self-upgrade".yellow()
+        ),
+        "".to_string(),
+    ]
+    .into_iter()
+    .enumerate()
+    .map(|(i, s)| {
+        let text_length = unformatted_messages[i].chars().count();
+        let padding = (width as f32 - text_length as f32) / 2f32;
+        let padding_l = " ".repeat(padding.floor() as usize);
+        let padding_r = " ".repeat(padding.ceil() as usize);
+        format!("{column}{padding_l}{s}{padding_r}{column}")
+    })
+    .collect::<Vec<_>>()
+    .join("\n");
+
+    let lines = "─".repeat(width).bright_magenta();
+
+    let tl = "╭".bright_magenta();
+    let tr = "╮".bright_magenta();
+    let bl = "╰".bright_magenta();
+    let br = "╯".bright_magenta();
+
+    println!("\n{tl}{lines}{tr}\n{message}\n{bl}{lines}{br}\n");

    Ok(())
}

-pub async fn download_github_release(
+#[instrument(skip(reqwest, writer), level = "trace")]
+pub async fn download_github_release<W: AsyncWrite + Unpin>(
    reqwest: &reqwest::Client,
    version: &Version,
-) -> anyhow::Result<Vec<u8>> {
+    mut writer: W,
+) -> anyhow::Result<()> {
    let (owner, repo) = get_repo();

    let release = reqwest

@@ -202,19 +236,22 @@ pub async fn download_github_release(
        .context("archive has no entry")?
        .context("failed to get first archive entry")?;

-    let mut result = Vec::new();
-
-    entry
-        .read_to_end(&mut result)
+    tokio::io::copy(&mut entry, &mut writer)
        .await
-        .context("failed to read archive entry bytes")?;
-
-    Ok(result)
+        .context("failed to write archive entry to file")
+        .map(|_| ())
}

+#[derive(Debug)]
+pub enum TagInfo {
+    Complete(Version),
+    Incomplete(Version),
+}
+
+#[instrument(skip(reqwest), level = "trace")]
pub async fn get_or_download_version(
    reqwest: &reqwest::Client,
-    version: &Version,
+    tag: &TagInfo,
    always_give_path: bool,
) -> anyhow::Result<Option<PathBuf>> {
    let path = home_dir()?.join("versions");

@@ -222,11 +259,23 @@ pub async fn get_or_download_version(
        .await
        .context("failed to create versions directory")?;

-    let path = path.join(format!("{version}{}", std::env::consts::EXE_SUFFIX));
+    let version = match tag {
+        TagInfo::Complete(version) => version,
+        // don't fetch the version since it could be cached
+        TagInfo::Incomplete(version) => version,
+    };
+
+    let path = path.join(format!(
+        "{}{}",
+        no_build_metadata(version),
+        std::env::consts::EXE_SUFFIX
+    ));

    let is_requested_version = !always_give_path && *version == current_version();

    if path.exists() {
+        tracing::debug!("version already exists");
+
        return Ok(if is_requested_version {
            None
        } else {

@@ -235,14 +284,29 @@ pub async fn get_or_download_version(
    }

    if is_requested_version {
+        tracing::debug!("copying current executable to version directory");
        fs::copy(current_exe()?, &path)
            .await
            .context("failed to copy current executable to version directory")?;
    } else {
-        let bytes = download_github_release(reqwest, version).await?;
-        fs::write(&path, bytes)
-            .await
-            .context("failed to write downloaded version file")?;
+        let version = match tag {
+            TagInfo::Complete(version) => version.clone(),
+            TagInfo::Incomplete(version) => {
+                get_remote_version(reqwest, VersionType::Specific(version.clone()))
+                    .await
+                    .context("failed to get remote version")?
+            }
+        };
+
+        tracing::debug!("downloading version");
+        download_github_release(
+            reqwest,
+            &version,
+            fs::File::create(&path)
+                .await
+                .context("failed to create version file")?,
+        )
+        .await?;
    }

    make_executable(&path)

@@ -256,6 +320,7 @@ pub async fn get_or_download_version(
    })
}

+#[instrument(level = "trace")]
pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
    let bin_exe_path = bin_dir().await?.join(format!(
        "{}{}",
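The release download no longer buffers the binary into a `Vec<u8>`; it streams the extracted archive entry into any `AsyncWrite` (a file, in practice) via `tokio::io::copy`. A reduced sketch of that generic sink pattern, with a byte slice standing in for the archive entry and a no-op sink in place of the version file:

    use anyhow::Context;
    use tokio::io::{AsyncRead, AsyncWrite};

    // Copy any async byte stream into any async sink, as the new
    // download_github_release does with the tar entry and the version file.
    async fn write_release<R, W>(mut entry: R, mut writer: W) -> anyhow::Result<()>
    where
        R: AsyncRead + Unpin,
        W: AsyncWrite + Unpin,
    {
        tokio::io::copy(&mut entry, &mut writer)
            .await
            .context("failed to write archive entry to file")
            .map(|_| ())
    }

    #[tokio::main]
    async fn main() -> anyhow::Result<()> {
        let fake_entry: &[u8] = b"binary contents"; // &[u8] implements AsyncRead
        // tokio::io::sink() is a no-op AsyncWrite; a real call passes fs::File::create(...)
        write_release(fake_entry, tokio::io::sink()).await?;
        Ok(())
    }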
src/download.rs

@@ -13,16 +13,18 @@ use std::{
    collections::HashSet,
    sync::{Arc, Mutex},
};
+use tracing::{instrument, Instrument};

type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>;

-type MultithreadDownloadJob = (
+pub(crate) type MultithreadDownloadJob = (
    tokio::sync::mpsc::Receiver<Result<String, errors::DownloadGraphError>>,
    MultithreadedGraph,
);

impl Project {
    /// Downloads a graph of dependencies
+    #[instrument(skip(self, graph, refreshed_sources, reqwest), level = "debug")]
    pub async fn download_graph(
        &self,
        graph: &DependencyGraph,

@@ -30,6 +32,7 @@ impl Project {
        reqwest: &reqwest::Client,
        prod: bool,
        write: bool,
+        wally: bool,
    ) -> Result<MultithreadDownloadJob, errors::DownloadGraphError> {
        let manifest = self.deser_manifest().await?;
        let manifest_target_kind = manifest.target.kind();

@@ -53,84 +56,102 @@ impl Project {
        )
        .await?;

+        let project = Arc::new(self.clone());
+
        for (name, versions) in graph {
            for (version_id, node) in versions {
+                // we need to download pesde packages first, since scripts (for target finding for example) can depend on them
+                if node.pkg_ref.like_wally() != wally {
+                    continue;
+                }
+
                let tx = tx.clone();

                let name = name.clone();
                let version_id = version_id.clone();
                let node = node.clone();

-                let project = Arc::new(self.clone());
+                let span = tracing::info_span!(
+                    "download",
+                    name = name.to_string(),
+                    version_id = version_id.to_string()
+                );

                let project = project.clone();
                let reqwest = reqwest.clone();
                let downloaded_graph = downloaded_graph.clone();

                let package_dir = self.package_dir().to_path_buf();

-                tokio::spawn(async move {
-                    let source = node.pkg_ref.source();
+                tokio::spawn(
+                    async move {
+                        let source = node.pkg_ref.source();

                        let container_folder = node.container_folder(
                            &package_dir
                                .join(manifest_target_kind.packages_folder(version_id.target()))
                                .join(PACKAGES_CONTAINER_NAME),
                            &name,
                            version_id.version(),
                        );

                        match fs::create_dir_all(&container_folder).await {
                            Ok(_) => {}
                            Err(e) => {
                                tx.send(Err(errors::DownloadGraphError::Io(e)))
                                    .await
                                    .unwrap();
                                return;
                            }
                        }

                        let project = project.clone();

-                        log::debug!("downloading {name}@{version_id}");
+                        tracing::debug!("downloading");

                        let (fs, target) =
                            match source.download(&node.pkg_ref, &project, &reqwest).await {
                                Ok(target) => target,
                                Err(e) => {
                                    tx.send(Err(Box::new(e).into())).await.unwrap();
                                    return;
                                }
                            };

-                        log::debug!("downloaded {name}@{version_id}");
+                        tracing::debug!("downloaded");

                        if write {
-                            if !prod || node.ty != DependencyType::Dev {
+                            if !prod || node.resolved_ty != DependencyType::Dev {
                                match fs.write_to(container_folder, project.cas_dir(), true).await {
                                    Ok(_) => {}
                                    Err(e) => {
                                        tx.send(Err(errors::DownloadGraphError::WriteFailed(e)))
                                            .await
                                            .unwrap();
                                        return;
                                    }
                                };
                            } else {
-                                log::debug!("skipping writing {name}@{version_id} to disk, dev dependency in prod mode");
+                                tracing::debug!(
+                                    "skipping write to disk, dev dependency in prod mode"
+                                );
                            }
                        }

                        let display_name = format!("{name}@{version_id}");

                        {
                            let mut downloaded_graph = downloaded_graph.lock().unwrap();
                            downloaded_graph
                                .entry(name)
                                .or_default()
                                .insert(version_id, DownloadedDependencyGraphNode { node, target });
                        }

                        tx.send(Ok(display_name)).await.unwrap();
-                });
+                    }
+                    .instrument(span),
+                );
            }
        }
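Each spawned download reports completion (or an error) over a bounded mpsc channel, and the receiver side is what drives the progress bar. A stripped-down sketch of that fan-out/fan-in shape, with strings in place of real package downloads:

    use tokio::sync::mpsc;

    #[tokio::main]
    async fn main() {
        let jobs = vec!["pkg_a@1.0.0", "pkg_b@2.1.0", "pkg_c@0.3.2"];
        // one channel slot per job, like the (tx, rx) pair in download_graph
        let (tx, mut rx) = mpsc::channel::<Result<String, String>>(jobs.len().max(1));

        for job in jobs {
            let tx = tx.clone();
            tokio::spawn(async move {
                // a real task downloads and writes the package here
                tx.send(Ok(job.to_string())).await.unwrap();
            });
        }
        drop(tx); // the receiver ends once all senders are gone

        while let Some(result) = rx.recv().await {
            match result {
                Ok(name) => println!("downloaded {name}"),
                Err(e) => eprintln!("failed: {e}"),
            }
        }
    }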
src/download_and_link.rs (new file)

@@ -0,0 +1,176 @@
use crate::{
    lockfile::{DependencyGraph, DownloadedGraph},
    manifest::DependencyType,
    source::PackageSources,
    Project,
};
use futures::FutureExt;
use std::{
    collections::HashSet,
    future::Future,
    sync::{Arc, Mutex as StdMutex},
};
use tokio::sync::Mutex;
use tracing::{instrument, Instrument};

/// Filters a graph to only include production dependencies, if `prod` is `true`
pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph {
    if !prod {
        return graph.clone();
    }

    graph
        .iter()
        .map(|(name, versions)| {
            (
                name.clone(),
                versions
                    .iter()
                    .filter(|(_, node)| node.node.resolved_ty != DependencyType::Dev)
                    .map(|(v_id, node)| (v_id.clone(), node.clone()))
                    .collect(),
            )
        })
        .collect()
}

/// Receiver for dependencies downloaded and linked
pub type DownloadAndLinkReceiver =
    tokio::sync::mpsc::Receiver<Result<String, crate::download::errors::DownloadGraphError>>;

impl Project {
    /// Downloads a graph of dependencies and links them in the correct order
    #[instrument(
        skip(self, graph, refreshed_sources, reqwest, pesde_cb),
        level = "debug"
    )]
    pub async fn download_and_link<
        F: FnOnce(&Arc<DownloadedGraph>) -> R + Send + 'static,
        R: Future<Output = Result<(), E>> + Send,
        E: Send + Sync + 'static,
    >(
        &self,
        graph: &Arc<DependencyGraph>,
        refreshed_sources: &Arc<Mutex<HashSet<PackageSources>>>,
        reqwest: &reqwest::Client,
        prod: bool,
        write: bool,
        pesde_cb: F,
    ) -> Result<
        (
            DownloadAndLinkReceiver,
            impl Future<Output = Result<DownloadedGraph, errors::DownloadAndLinkError<E>>>,
        ),
        errors::DownloadAndLinkError<E>,
    > {
        let (tx, rx) = tokio::sync::mpsc::channel(
            graph
                .iter()
                .map(|(_, versions)| versions.len())
                .sum::<usize>()
                .max(1),
        );
        let downloaded_graph = Arc::new(StdMutex::new(DownloadedGraph::default()));

        let this = self.clone();
        let graph = graph.clone();
        let reqwest = reqwest.clone();
        let refreshed_sources = refreshed_sources.clone();

        Ok((
            rx,
            tokio::spawn(async move {
                let mut refreshed_sources = refreshed_sources.lock().await;

                // step 1. download pesde dependencies
                let (mut pesde_rx, pesde_graph) = this
                    .download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, false)
                    .instrument(tracing::debug_span!("download (pesde)"))
                    .await?;

                while let Some(result) = pesde_rx.recv().await {
                    tx.send(result).await.unwrap();
                }

                let pesde_graph = Arc::into_inner(pesde_graph).unwrap().into_inner().unwrap();

                // step 2. link pesde dependencies. do so without types
                if write {
                    this.link_dependencies(&filter_graph(&pesde_graph, prod), false)
                        .instrument(tracing::debug_span!("link (pesde)"))
                        .await?;
                }

                let pesde_graph = Arc::new(pesde_graph);

                pesde_cb(&pesde_graph)
                    .await
                    .map_err(errors::DownloadAndLinkError::PesdeCallback)?;

                let pesde_graph = Arc::into_inner(pesde_graph).unwrap();

                // step 3. download wally dependencies
                let (mut wally_rx, wally_graph) = this
                    .download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, true)
                    .instrument(tracing::debug_span!("download (wally)"))
                    .await?;

                while let Some(result) = wally_rx.recv().await {
                    tx.send(result).await.unwrap();
                }

                let wally_graph = Arc::into_inner(wally_graph).unwrap().into_inner().unwrap();

                {
                    let mut downloaded_graph = downloaded_graph.lock().unwrap();
                    downloaded_graph.extend(pesde_graph);
                    for (name, versions) in wally_graph {
                        for (version_id, node) in versions {
                            downloaded_graph
                                .entry(name.clone())
                                .or_default()
                                .insert(version_id, node);
                        }
                    }
                }

                let graph = Arc::into_inner(downloaded_graph)
                    .unwrap()
                    .into_inner()
                    .unwrap();

                // step 4. link ALL dependencies. do so with types
                if write {
                    this.link_dependencies(&filter_graph(&graph, prod), true)
                        .instrument(tracing::debug_span!("link (all)"))
                        .await?;
                }

                Ok(graph)
            })
            .map(|r| r.unwrap()),
        ))
    }
}

/// Errors that can occur when downloading and linking dependencies
pub mod errors {
    use thiserror::Error;

    /// An error that can occur when downloading and linking dependencies
    #[derive(Debug, Error)]
    #[non_exhaustive]
    pub enum DownloadAndLinkError<E> {
        /// An error occurred while downloading the graph
        #[error("error downloading graph")]
        DownloadGraph(#[from] crate::download::errors::DownloadGraphError),

        /// An error occurred while linking dependencies
        #[error("error linking dependencies")]
        Linking(#[from] crate::linking::errors::LinkingError),

        /// An error occurred while executing the pesde callback
        #[error("error executing pesde callback")]
        PesdeCallback(#[source] E),
    }
}
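`download_and_link` returns a progress receiver plus a future that resolves to the final graph, so a caller drains the channel while downloads run and then awaits the graph. A hypothetical call shape based on the signatures above (not a verbatim pesde snippet; `project`, `graph`, and the no-op callback are assumed to be set up by the caller, as in the update command's diff):

    use std::collections::HashSet;
    use std::sync::Arc;
    use tokio::sync::Mutex;

    async fn install(
        project: &pesde::Project,
        graph: Arc<pesde::lockfile::DependencyGraph>,
        reqwest: reqwest::Client,
    ) -> anyhow::Result<()> {
        let refreshed_sources = Arc::new(Mutex::new(HashSet::new()));

        // step 1: kick off downloads; rx reports per-package progress
        let (mut rx, graph_fut) = project
            .download_and_link(
                &graph,
                &refreshed_sources,
                &reqwest,
                false, // prod
                true,  // write to disk
                |_pesde_graph| async { Ok::<_, std::io::Error>(()) }, // hook between the pesde and wally phases
            )
            .await?;

        // step 2: drain progress while downloads run
        while let Some(result) = rx.recv().await {
            println!("done: {}", result?);
        }

        // step 3: await the fully downloaded and linked graph
        let _downloaded = graph_fut.await?;
        Ok(())
    }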
src/lib.rs

@@ -14,11 +14,16 @@ use futures::{future::try_join_all, Stream};
use gix::sec::identity::Account;
use std::{
    collections::{HashMap, HashSet},
+    fmt::Debug,
    path::{Path, PathBuf},
};
+use tracing::instrument;
+use wax::Pattern;

/// Downloading packages
pub mod download;
+/// Utility for downloading and linking in the correct order
+pub mod download_and_link;
/// Linking packages
pub mod linking;
/// Lockfile

@@ -47,6 +52,8 @@ pub const DEFAULT_INDEX_NAME: &str = "default";
/// The name of the packages container
pub const PACKAGES_CONTAINER_NAME: &str = ".pesde";
pub(crate) const LINK_LIB_NO_FILE_FOUND: &str = "____pesde_no_export_file_found";
+/// The folder in which scripts are linked
+pub const SCRIPTS_LINK_FOLDER: &str = ".pesde";

/// Struct containing the authentication configuration
#[derive(Debug, Default, Clone)]

@@ -144,29 +151,35 @@ impl Project {
    }

    /// Read the manifest file
+    #[instrument(skip(self), ret(level = "trace"), level = "debug")]
    pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
        let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
        Ok(string)
    }

+    // TODO: cache the manifest
    /// Deserialize the manifest file
+    #[instrument(skip(self), ret(level = "trace"), level = "debug")]
    pub async fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
        let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
        Ok(toml::from_str(&string)?)
    }

    /// Write the manifest file
+    #[instrument(skip(self, manifest), level = "debug")]
    pub async fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
        fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref()).await
    }

    /// Deserialize the lockfile
+    #[instrument(skip(self), ret(level = "trace"), level = "debug")]
    pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
        let string = fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME)).await?;
        Ok(toml::from_str(&string)?)
    }

    /// Write the lockfile
+    #[instrument(skip(self, lockfile), level = "debug")]
    pub async fn write_lockfile(
        &self,
        lockfile: Lockfile,

@@ -177,9 +190,11 @@ impl Project {
    }

    /// Get the workspace members
-    pub async fn workspace_members<P: AsRef<Path>>(
+    #[instrument(skip(self), level = "debug")]
+    pub async fn workspace_members<P: AsRef<Path> + Debug>(
        &self,
        dir: P,
+        can_ref_self: bool,
    ) -> Result<
        impl Stream<Item = Result<(PathBuf, Manifest), errors::WorkspaceMembersError>>,
        errors::WorkspaceMembersError,

@@ -192,15 +207,13 @@ impl Project {
            errors::WorkspaceMembersError::ManifestDeser(dir.to_path_buf(), Box::new(e))
        })?;

-        let members = manifest
-            .workspace_members
-            .into_iter()
-            .map(|glob| dir.join(glob))
-            .map(|path| glob::glob(&path.as_os_str().to_string_lossy()))
-            .collect::<Result<Vec<_>, _>>()?
-            .into_iter()
-            .flat_map(|paths| paths.into_iter())
-            .collect::<Result<Vec<_>, _>>()?;
+        let members = matching_globs(
+            dir,
+            manifest.workspace_members.iter().map(|s| s.as_str()),
+            false,
+            can_ref_self,
+        )
+        .await?;

        Ok(stream! {
            for path in members {

@@ -217,6 +230,145 @@ impl Project {
    }
}

+/// Gets all matching paths in a directory
+#[deprecated(
+    since = "0.5.0-rc.13",
+    note = "use `matching_globs` instead, which does not have the old behaviour of including whole directories by their name (`src` instead of `src/**`)"
+)]
+#[instrument(ret, level = "trace")]
+pub async fn matching_globs_old_behaviour<
+    'a,
+    P: AsRef<Path> + Debug,
+    I: IntoIterator<Item = &'a str> + Debug,
+>(
+    dir: P,
+    globs: I,
+    relative: bool,
+) -> Result<HashSet<PathBuf>, errors::MatchingGlobsError> {
+    let (negative_globs, positive_globs) = globs
+        .into_iter()
+        .partition::<Vec<_>, _>(|glob| glob.starts_with('!'));
+
+    let negative_globs = wax::any(
+        negative_globs
+            .into_iter()
+            .map(|glob| wax::Glob::new(&glob[1..]))
+            .collect::<Result<Vec<_>, _>>()?,
+    )?;
+
+    let (positive_globs, file_names) = positive_globs
+        .into_iter()
+        // only globs we can be sure of (maintaining compatibility with old "only file/dir name" system)
+        .partition::<Vec<_>, _>(|glob| glob.contains('/'));
+    let file_names = file_names.into_iter().collect::<HashSet<_>>();
+
+    let positive_globs = wax::any(
+        positive_globs
+            .into_iter()
+            .map(wax::Glob::new)
+            .collect::<Result<Vec<_>, _>>()?,
+    )?;
+
+    let mut read_dirs = vec![(fs::read_dir(dir.as_ref().to_path_buf()).await?, false)];
+    let mut paths = HashSet::new();
+
+    let mut is_root = true;
+
+    while let Some((mut read_dir, is_entire_dir_included)) = read_dirs.pop() {
+        while let Some(entry) = read_dir.next_entry().await? {
+            let path = entry.path();
+            let relative_path = path.strip_prefix(dir.as_ref()).unwrap();
+            let file_name = path.file_name().unwrap();
+            let is_filename_match =
+                is_root && file_name.to_str().is_some_and(|s| file_names.contains(s));
+
+            if entry.file_type().await?.is_dir() {
+                read_dirs.push((
+                    fs::read_dir(&path).await?,
+                    is_entire_dir_included || is_filename_match,
+                ));
+                if is_filename_match {
+                    tracing::warn!("directory name usage found for {}. this is deprecated and will be removed in the future", path.display());
+                }
+            }
+
+            if (is_entire_dir_included || is_filename_match)
+                || (positive_globs.is_match(relative_path)
+                    && !negative_globs.is_match(relative_path))
+            {
+                paths.insert(if relative {
+                    relative_path.to_path_buf()
+                } else {
+                    path.to_path_buf()
+                });
+            }
+        }
+
+        is_root = false;
+    }
+
+    Ok(paths)
+}
+
+/// Gets all matching paths in a directory
+#[instrument(ret, level = "trace")]
+pub async fn matching_globs<'a, P: AsRef<Path> + Debug, I: IntoIterator<Item = &'a str> + Debug>(
+    dir: P,
+    globs: I,
+    relative: bool,
|
||||
can_ref_self: bool,
|
||||
) -> Result<HashSet<PathBuf>, errors::MatchingGlobsError> {
|
||||
let (negative_globs, mut positive_globs): (HashSet<&str>, _) =
|
||||
globs.into_iter().partition(|glob| glob.starts_with('!'));
|
||||
|
||||
let include_self = positive_globs.remove(".") && can_ref_self;
|
||||
|
||||
let negative_globs = wax::any(
|
||||
negative_globs
|
||||
.into_iter()
|
||||
.map(|glob| wax::Glob::new(&glob[1..]))
|
||||
.collect::<Result<Vec<_>, _>>()?,
|
||||
)?;
|
||||
let positive_globs = wax::any(
|
||||
positive_globs
|
||||
.into_iter()
|
||||
.map(wax::Glob::new)
|
||||
.collect::<Result<Vec<_>, _>>()?,
|
||||
)?;
|
||||
|
||||
let mut read_dirs = vec![fs::read_dir(dir.as_ref().to_path_buf()).await?];
|
||||
let mut paths = HashSet::new();
|
||||
|
||||
if include_self {
|
||||
paths.insert(if relative {
|
||||
PathBuf::new()
|
||||
} else {
|
||||
dir.as_ref().to_path_buf()
|
||||
});
|
||||
}
|
||||
|
||||
while let Some(mut read_dir) = read_dirs.pop() {
|
||||
while let Some(entry) = read_dir.next_entry().await? {
|
||||
let path = entry.path();
|
||||
if entry.file_type().await?.is_dir() {
|
||||
read_dirs.push(fs::read_dir(&path).await?);
|
||||
}
|
||||
|
||||
let relative_path = path.strip_prefix(dir.as_ref()).unwrap();
|
||||
|
||||
if positive_globs.is_match(relative_path) && !negative_globs.is_match(relative_path) {
|
||||
paths.insert(if relative {
|
||||
relative_path.to_path_buf()
|
||||
} else {
|
||||
path.to_path_buf()
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(paths)
|
||||
}
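
A minimal usage sketch of the new `matching_globs` API (the directory and patterns here are hypothetical, and a Tokio runtime is assumed; `!`-prefixed patterns are negations, and "." is only honoured when `can_ref_self` is true):

// Hypothetical usage of `matching_globs`, not part of the diff above.
let paths = matching_globs(
    "/path/to/project",
    ["src/**", "!src/generated/**"],
    true,  // relative: return paths relative to the directory
    false, // can_ref_self: whether "." may match the directory itself
)
.await?;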
/// Refreshes the sources asynchronously
pub async fn refresh_sources<I: Iterator<Item = PackageSources>>(
project: &Project,

@@ -297,12 +449,21 @@ pub mod errors {
#[error("error interacting with the filesystem")]
Io(#[from] std::io::Error),

/// An invalid glob pattern was found
#[error("invalid glob pattern")]
Glob(#[from] glob::PatternError),

/// An error occurred while globbing
#[error("error globbing")]
Globbing(#[from] glob::GlobError),
Globbing(#[from] MatchingGlobsError),
}

/// Errors that can occur when finding matching globs
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum MatchingGlobsError {
/// An error occurred interacting with the filesystem
#[error("error interacting with the filesystem")]
Io(#[from] std::io::Error),

/// An error occurred while building a glob
#[error("error building glob")]
BuildGlob(#[from] wax::BuildError),
}
}

@@ -69,10 +69,29 @@ pub fn generate_lib_linking_module<I: IntoIterator<Item = S>, S: AsRef<str>>(
fn luau_style_path(path: &Path) -> String {
let path = path
.components()
.filter_map(|ct| match ct {
.zip(
path.components()
.skip(1)
.map(Some)
.chain(std::iter::repeat(None)),
)
.filter_map(|(ct, next_ct)| match ct {
Component::CurDir => Some(".".to_string()),
Component::ParentDir => Some("..".to_string()),
Component::Normal(part) => Some(format!("{}", part.to_string_lossy())),
Component::Normal(part) => {
let str = part.to_string_lossy();

Some(
(if next_ct.is_some() {
&str
} else {
str.strip_suffix(".luau")
.or_else(|| str.strip_suffix(".lua"))
.unwrap_or(&str)
})
.to_string(),
)
}
_ => None,
})
.collect::<Vec<_>>()
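
The `zip` above is a lookahead idiom: each path component is paired with `Some(next)` (or `None` for the last one), so only the final component has its `.luau`/`.lua` suffix stripped. A standalone sketch of the same pattern, with hypothetical input:

// Pair every item with the one that follows it; the last pairs with None.
let parts = ["..", "pkg", "init.luau"];
let with_next = parts
    .iter()
    .zip(parts.iter().skip(1).map(Some).chain(std::iter::repeat(None)));
for (cur, next) in with_next {
    // `next.is_none()` marks the final component, where the extension would go.
    println!("{cur} is last: {}", next.is_none());
}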

@@ -98,10 +117,10 @@ pub fn get_lib_require_path(
) -> Result<String, errors::GetLibRequirePath> {
let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
let path = if use_new_structure {
log::debug!("using new structure for require path with {:?}", lib_file);
tracing::debug!("using new structure for require path with {lib_file:?}");
lib_file.to_path(path)
} else {
log::debug!("using old structure for require path with {:?}", lib_file);
tracing::debug!("using old structure for require path with {lib_file:?}");
path
};

@@ -126,14 +145,26 @@ pub fn get_lib_require_path(

let path = path
.components()
.filter_map(|component| match component {
.zip(
path.components()
.skip(1)
.map(Some)
.chain(std::iter::repeat(None)),
)
.filter_map(|(component, next_comp)| match component {
Component::ParentDir => Some(".Parent".to_string()),
Component::Normal(part) if part != "init.lua" && part != "init.luau" => {
let str = part.to_string_lossy();

Some(format!(
"[{:?}]",
part.to_string_lossy()
.trim_end_matches(".lua")
.trim_end_matches(".luau")
if next_comp.is_some() {
&str
} else {
str.strip_suffix(".luau")
.or_else(|| str.strip_suffix(".lua"))
.unwrap_or(&str)
}
))
}
_ => None,

@@ -168,12 +199,30 @@ pub fn get_bin_require_path(
luau_style_path(&path)
}

/// Generate a linking module for a script
pub fn generate_script_linking_module(require_path: &str) -> String {
format!(r#"return require({require_path})"#)
}

/// Get the require path for a script
pub fn get_script_require_path(
base_dir: &Path,
script_file: &RelativePathBuf,
destination_dir: &Path,
) -> String {
let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
let path = script_file.to_path(path);

luau_style_path(&path)
}

/// Errors for the linking module utilities
pub mod errors {
use thiserror::Error;

/// An error occurred while getting the require path for a library
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum GetLibRequirePath {
/// The path for the RobloxPlaceKind could not be found
#[error("could not find the path for the RobloxPlaceKind {0}")]

@@ -1,6 +1,7 @@
use crate::{
linking::generator::get_file_types,
lockfile::DownloadedGraph,
lockfile::{DownloadedDependencyGraphNode, DownloadedGraph},
manifest::Manifest,
names::PackageNames,
scripts::{execute_script, ScriptName},
source::{

@@ -8,14 +9,18 @@ use crate::{
traits::PackageRef,
version_id::VersionId,
},
Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME,
Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
};
use fs_err::tokio as fs;
use futures::future::try_join_all;
use std::{
collections::BTreeMap,
collections::HashMap,
ffi::OsStr,
path::{Path, PathBuf},
sync::Arc,
};
use tokio::task::spawn_blocking;
use tracing::{instrument, Instrument};

/// Generates linking modules for a project
pub mod generator;

@@ -29,231 +34,330 @@ async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf>
async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
let hash = store_in_cas(cas_dir, contents.as_bytes(), |_| async { Ok(()) }).await?;

match fs::remove_file(&destination).await {
Ok(_) => {}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
Err(e) => return Err(e),
};

fs::hard_link(cas_path(&hash, cas_dir), destination).await
}
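
In other words, `write_cas` stores the module contents once in the content-addressable store and hard-links the destination to it; the remove-then-link dance exists because `hard_link` fails if the destination already exists. A hedged sketch of a call (the paths and contents below are hypothetical):

// Hypothetical call: (re-)link an alias module to CAS-backed contents.
write_cas(
    base_folder.join("foo.luau"),
    project.cas_dir(),
    "return require(\"./.pesde/acme_foo@1.2.3/foo\")",
)
.await?;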

impl Project {
/// Links the dependencies of the project
#[instrument(skip(self, graph), level = "debug")]
pub async fn link_dependencies(
&self,
graph: &DownloadedGraph,
with_types: bool,
) -> Result<(), errors::LinkingError> {
let manifest = self.deser_manifest().await?;
let manifest_target_kind = manifest.target.kind();
let manifest = Arc::new(manifest);

let mut package_types = BTreeMap::<&PackageNames, BTreeMap<&VersionId, Vec<String>>>::new();
// step 1. link all non-wally packages (and their dependencies) temporarily without types
// we do this separately to allow the required tools for the scripts to be installed
self.link(graph, &manifest, &Arc::new(Default::default()), false)
.await?;

for (name, versions) in graph {
for (version_id, node) in versions {
let Some(lib_file) = node.target.lib_path() else {
continue;
};

let container_folder = node.node.container_folder(
&self
.package_dir()
.join(manifest.target.kind().packages_folder(version_id.target()))
.join(PACKAGES_CONTAINER_NAME),
name,
version_id.version(),
);

let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
let lib_file = lib_file.to_path(&container_folder);

let contents = match fs::read_to_string(&lib_file).await {
Ok(contents) => contents,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
return Err(errors::LinkingError::LibFileNotFound(
lib_file.display().to_string(),
));
}
Err(e) => return Err(e.into()),
};

let types = match get_file_types(&contents) {
Ok(types) => types,
Err(e) => {
return Err(errors::LinkingError::FullMoon(
lib_file.display().to_string(),
e,
))
}
};

log::debug!("{name}@{version_id} has {} exported types", types.len());

types
} else {
vec![]
};

package_types
.entry(name)
.or_default()
.insert(version_id, types);

if let Some(build_files) = Some(&node.target)
.filter(|_| !node.node.pkg_ref.like_wally())
.and_then(|t| t.build_files())
{
let script_name = ScriptName::RobloxSyncConfigGenerator.to_string();

let Some(script_path) = manifest.scripts.get(&script_name) else {
log::warn!("not having a `{script_name}` script in the manifest might cause issues with Roblox linking");
continue;
};

execute_script(
ScriptName::RobloxSyncConfigGenerator,
&script_path.to_path(self.package_dir()),
std::iter::once(container_folder.as_os_str())
.chain(build_files.iter().map(OsStr::new)),
self,
false,
)
.map_err(|e| {
errors::LinkingError::GenerateRobloxSyncConfig(
container_folder.display().to_string(),
e,
)
})?;
}
}
if !with_types {
return Ok(());
}

for (name, versions) in graph {
for (version_id, node) in versions {
let (node_container_folder, node_packages_folder) = {
let base_folder = create_and_canonicalize(
self.package_dir()
.join(manifest.target.kind().packages_folder(version_id.target())),
)
.await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
// step 2. extract the types from libraries, prepare Roblox packages for syncing
let roblox_sync_config_gen_script = manifest
.scripts
.get(&ScriptName::RobloxSyncConfigGenerator.to_string());

let package_types = try_join_all(graph.iter().map(|(name, versions)| async move {
Ok::<_, errors::LinkingError>((
name,
try_join_all(versions.iter().map(|(version_id, node)| async move {
let Some(lib_file) = node.target.lib_path() else {
return Ok((version_id, vec![]));
};

let container_folder = node.node.container_folder(
&packages_container_folder,
&self
.package_dir()
.join(manifest_target_kind.packages_folder(version_id.target()))
.join(PACKAGES_CONTAINER_NAME),
name,
version_id.version(),
);

if let Some((alias, _)) = &node.node.direct.as_ref() {
if let Some((lib_file, types)) =
node.target.lib_path().and_then(|lib_file| {
package_types
.get(name)
.and_then(|v| v.get(version_id))
.map(|types| (lib_file, types))
})
{
write_cas(
base_folder.join(format!("{alias}.luau")),
self.cas_dir(),
&generator::generate_lib_linking_module(
&generator::get_lib_require_path(
&node.target.kind(),
&base_folder,
lib_file,
&container_folder,
node.node.pkg_ref.use_new_structure(),
&base_folder,
container_folder.strip_prefix(&base_folder).unwrap(),
&manifest,
)?,
types,
),
)
.await?;
let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
let lib_file = lib_file.to_path(&container_folder);

let contents = match fs::read_to_string(&lib_file).await {
Ok(contents) => contents,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
return Err(errors::LinkingError::LibFileNotFound(
lib_file.display().to_string(),
));
}
Err(e) => return Err(e.into()),
};

if let Some(bin_file) = node.target.bin_path() {
write_cas(
base_folder.join(format!("{alias}.bin.luau")),
self.cas_dir(),
&generator::generate_bin_linking_module(
&container_folder,
&generator::get_bin_require_path(
&base_folder,
bin_file,
&container_folder,
),
),
)
.await?;
}
let types = match spawn_blocking(move || get_file_types(&contents))
.await
.unwrap()
{
Ok(types) => types,
Err(e) => {
return Err(errors::LinkingError::FullMoon(
lib_file.display().to_string(),
e,
))
}
};

tracing::debug!("contains {} exported types", types.len());

types
} else {
vec![]
};

if let Some(build_files) = Some(&node.target)
.filter(|_| !node.node.pkg_ref.like_wally())
.and_then(|t| t.build_files())
{
let Some(script_path) = roblox_sync_config_gen_script else {
tracing::warn!("not having a `{}` script in the manifest might cause issues with Roblox linking", ScriptName::RobloxSyncConfigGenerator);
return Ok((version_id, types));
};

execute_script(
ScriptName::RobloxSyncConfigGenerator,
&script_path.to_path(self.package_dir()),
std::iter::once(container_folder.as_os_str())
.chain(build_files.iter().map(OsStr::new)),
self,
false,
).await
.map_err(|e| {
errors::LinkingError::GenerateRobloxSyncConfig(
container_folder.display().to_string(),
e,
)
})?;
}

(container_folder, base_folder)
};
Ok((version_id, types))
}.instrument(tracing::debug_span!("extract types", name = name.to_string(), version_id = version_id.to_string()))))
.await?
.into_iter()
.collect::<HashMap<_, _>>(),
))
}))
.await?
.into_iter()
.collect::<HashMap<_, _>>();

for (dependency_name, (dependency_version_id, dependency_alias)) in
&node.node.dependencies
{
let Some(dependency_node) = graph
.get(dependency_name)
.and_then(|v| v.get(dependency_version_id))
else {
return Err(errors::LinkingError::DependencyNotFound(
dependency_name.to_string(),
dependency_version_id.to_string(),
));
};
// step 3. link all packages (and their dependencies), this time with types
self.link(graph, &manifest, &Arc::new(package_types), true)
.await
}

let Some(lib_file) = dependency_node.target.lib_path() else {
continue;
};
#[allow(clippy::too_many_arguments)]
async fn link_files(
&self,
base_folder: &Path,
container_folder: &Path,
root_container_folder: &Path,
relative_container_folder: &Path,
node: &DownloadedDependencyGraphNode,
name: &PackageNames,
version_id: &VersionId,
alias: &str,
package_types: &HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>,
manifest: &Manifest,
) -> Result<(), errors::LinkingError> {
static NO_TYPES: Vec<String> = Vec::new();

let base_folder = create_and_canonicalize(
self.package_dir().join(
version_id
.target()
.packages_folder(dependency_version_id.target()),
),
)
.await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
if let Some(lib_file) = node.target.lib_path() {
let lib_module = generator::generate_lib_linking_module(
&generator::get_lib_require_path(
&node.target.kind(),
base_folder,
lib_file,
container_folder,
node.node.pkg_ref.use_new_structure(),
root_container_folder,
relative_container_folder,
manifest,
)?,
package_types
.get(name)
.and_then(|v| v.get(version_id))
.unwrap_or(&NO_TYPES),
);

let container_folder = dependency_node.node.container_folder(
&packages_container_folder,
dependency_name,
dependency_version_id.version(),
);
write_cas(
base_folder.join(format!("{alias}.luau")),
self.cas_dir(),
&lib_module,
)
.await?;
}

let linker_folder = create_and_canonicalize(
node_container_folder.join(
node.node
.base_folder(version_id, dependency_node.target.kind()),
),
)
if let Some(bin_file) = node.target.bin_path() {
let bin_module = generator::generate_bin_linking_module(
container_folder,
&generator::get_bin_require_path(base_folder, bin_file, container_folder),
);

write_cas(
base_folder.join(format!("{alias}.bin.luau")),
self.cas_dir(),
&bin_module,
)
.await?;
}

if let Some(scripts) = node.target.scripts().filter(|s| !s.is_empty()) {
let scripts_base =
create_and_canonicalize(self.package_dir().join(SCRIPTS_LINK_FOLDER).join(alias))
.await?;

write_cas(
linker_folder.join(format!("{dependency_alias}.luau")),
self.cas_dir(),
&generator::generate_lib_linking_module(
&generator::get_lib_require_path(
&dependency_node.target.kind(),
&linker_folder,
lib_file,
&container_folder,
dependency_node.node.pkg_ref.use_new_structure(),
&node_packages_folder,
container_folder.strip_prefix(&base_folder).unwrap(),
&manifest,
)?,
package_types
.get(dependency_name)
.and_then(|v| v.get(dependency_version_id))
.unwrap(),
),
)
.await?;
}
for (script_name, script_path) in scripts {
let script_module =
generator::generate_script_linking_module(&generator::get_script_require_path(
&scripts_base,
script_path,
container_folder,
));

write_cas(
scripts_base.join(format!("{script_name}.luau")),
self.cas_dir(),
&script_module,
)
.await?;
}
}

Ok(())
}

async fn link(
&self,
graph: &DownloadedGraph,
manifest: &Arc<Manifest>,
package_types: &Arc<HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>>,
is_complete: bool,
) -> Result<(), errors::LinkingError> {
try_join_all(graph.iter().flat_map(|(name, versions)| {
versions.iter().map(|(version_id, node)| {
let name = name.clone();
let manifest = manifest.clone();
let package_types = package_types.clone();

let span = tracing::info_span!(
"link",
name = name.to_string(),
version_id = version_id.to_string()
);

async move {
let (node_container_folder, node_packages_folder) = {
let base_folder = create_and_canonicalize(
self.package_dir()
.join(manifest.target.kind().packages_folder(version_id.target())),
)
.await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);

let container_folder = node.node.container_folder(
&packages_container_folder,
&name,
version_id.version(),
);

if let Some((alias, _, _)) = &node.node.direct {
self.link_files(
&base_folder,
&container_folder,
&base_folder,
container_folder.strip_prefix(&base_folder).unwrap(),
node,
&name,
version_id,
alias,
&package_types,
&manifest,
)
.await?;
}

(container_folder, base_folder)
};

for (dependency_name, (dependency_version_id, dependency_alias)) in
&node.node.dependencies
{
let Some(dependency_node) = graph
.get(dependency_name)
.and_then(|v| v.get(dependency_version_id))
else {
if is_complete {
return Err(errors::LinkingError::DependencyNotFound(
format!("{dependency_name}@{dependency_version_id}"),
format!("{name}@{version_id}"),
));
}

continue;
};

let base_folder = create_and_canonicalize(
self.package_dir().join(
version_id
.target()
.packages_folder(dependency_version_id.target()),
),
)
.await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);

let container_folder = dependency_node.node.container_folder(
&packages_container_folder,
dependency_name,
dependency_version_id.version(),
);

let linker_folder = create_and_canonicalize(
node_container_folder.join(
node.node
.base_folder(version_id, dependency_node.target.kind()),
),
)
.await?;

self.link_files(
&linker_folder,
&container_folder,
&node_packages_folder,
container_folder.strip_prefix(&base_folder).unwrap(),
dependency_node,
dependency_name,
dependency_version_id,
dependency_alias,
&package_types,
&manifest,
)
.await?;
}

Ok(())
}
.instrument(span)
})
}))
.await
.map(|_| ())
}
}

/// Errors that can occur while linking dependencies

@@ -273,7 +377,7 @@ pub mod errors {
Io(#[from] std::io::Error),

/// A dependency was not found
#[error("dependency not found: {0}@{1}")]
#[error("dependency `{0}` of `{1}` not found")]
DependencyNotFound(String, String),

/// The library file was not found

@@ -14,7 +14,7 @@ use relative_path::RelativePathBuf;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::{
collections::{btree_map::Entry, BTreeMap},
collections::BTreeMap,
path::{Path, PathBuf},
};

@@ -24,14 +24,17 @@ pub type Graph<Node> = BTreeMap<PackageNames, BTreeMap<VersionId, Node>>;
/// A dependency graph node
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DependencyGraphNode {
/// The alias and specifiers for the dependency, if it is a direct dependency (i.e. used by the current project)
/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub direct: Option<(String, DependencySpecifiers)>,
pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<PackageNames, (VersionId, String)>,
/// The type of the dependency
pub ty: DependencyType,
/// The resolved (transformed, for example Peer -> Standard) type of the dependency
pub resolved_ty: DependencyType,
/// Whether the resolved type should be Peer if this isn't depended on
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
pub is_peer: bool,
/// The package reference
pub pkg_ref: PackageRefs,
}

@@ -52,6 +55,18 @@ impl DependencyGraphNode {
name: &PackageNames,
version: &Version,
) -> PathBuf {
if self.pkg_ref.like_wally() {
return path
.as_ref()
.join(format!(
"{}_{}@{}",
name.as_str().0,
name.as_str().1,
version
))
.join(name.as_str().1);
}

path.as_ref()
.join(name.escaped())
.join(version.to_string())
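
For orientation, the two layouts `container_folder` produces (the package name and version below are hypothetical, and the exact output of `escaped()` is an assumption):

// Hypothetical: a wally-like ref named ("acme", "foo") at 1.2.3 lands in
//   <packages>/acme_foo@1.2.3/foo
// while other refs land in
//   <packages>/<name.escaped()>/1.2.3
let folder = node.container_folder(&packages_container_folder, &name, version_id.version());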

@@ -62,45 +77,6 @@ impl DependencyGraphNode {
/// A graph of `DependencyGraphNode`s
pub type DependencyGraph = Graph<DependencyGraphNode>;

pub(crate) fn insert_node(
graph: &mut DependencyGraph,
name: PackageNames,
version: VersionId,
mut node: DependencyGraphNode,
is_top_level: bool,
) {
if !is_top_level && node.direct.take().is_some() {
log::debug!(
"tried to insert {name}@{version} as direct dependency from a non top-level context",
);
}

match graph
.entry(name.clone())
.or_default()
.entry(version.clone())
{
Entry::Vacant(entry) => {
entry.insert(node);
}
Entry::Occupied(existing) => {
let current_node = existing.into_mut();

match (&current_node.direct, &node.direct) {
(Some(_), Some(_)) => {
log::warn!("duplicate direct dependency for {name}@{version}");
}

(None, Some(_)) => {
current_node.direct = node.direct;
}

(_, _) => {}
}
}
}
}

/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DownloadedDependencyGraphNode {

109 src/main.rs

@@ -1,27 +1,37 @@
#[cfg(feature = "version-management")]
use crate::cli::version::{check_for_updates, get_or_download_version};
use crate::cli::version::{check_for_updates, get_or_download_version, TagInfo};
use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
use anyhow::Context;
use clap::Parser;
use clap::{builder::styling::AnsiColor, Parser};
use fs_err::tokio as fs;
use indicatif::MultiProgress;
use indicatif_log_bridge::LogWrapper;
use pesde::{AuthConfig, Project, MANIFEST_FILE_NAME};
use pesde::{matching_globs, AuthConfig, Project, MANIFEST_FILE_NAME};
use std::{
collections::HashSet,
path::{Path, PathBuf},
};
use tempfile::NamedTempFile;
use tracing::instrument;
use tracing_indicatif::{filter::IndicatifFilter, IndicatifLayer};
use tracing_subscriber::{
filter::LevelFilter, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer,
};

mod cli;
pub mod util;

const STYLES: clap::builder::Styles = clap::builder::Styles::styled()
.header(AnsiColor::Yellow.on_default().underline())
.usage(AnsiColor::Yellow.on_default().underline())
.literal(AnsiColor::Green.on_default().bold())
.placeholder(AnsiColor::Cyan.on_default());

#[derive(Parser, Debug)]
#[clap(
version,
about = "A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune"
about = "A package manager for the Luau programming language",
long_about = "A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune"
)]
#[command(disable_version_flag = true)]
#[command(disable_version_flag = true, styles = STYLES)]
struct Cli {
/// Print version
#[arg(short = 'v', short_alias = 'V', long, action = clap::builder::ArgAction::Version)]

@@ -31,6 +41,7 @@ struct Cli {
subcommand: cli::commands::Subcommand,
}

#[instrument(level = "trace")]
async fn get_linkable_dir(path: &Path) -> PathBuf {
let mut curr_path = PathBuf::new();
let file_to_try = NamedTempFile::new_in(path).expect("failed to create temporary file");

@@ -61,7 +72,7 @@ async fn get_linkable_dir(path: &Path) -> PathBuf {

if fs::hard_link(file_to_try.path(), &try_path).await.is_ok() {
if let Err(err) = fs::remove_file(&try_path).await {
log::warn!(
tracing::warn!(
"failed to remove temporary file at {}: {err}",
try_path.display()
);

@@ -107,7 +118,12 @@ async fn run() -> anyhow::Result<()> {
// on unix systems
let status = std::process::Command::new("lune")
.arg("run")
.arg(exe.with_extension(""))
.arg(
exe.parent()
.map(|p| p.join(".impl").join(exe.file_name().unwrap()))
.unwrap_or(exe)
.with_extension("luau"),
)
.arg("--")
.args(std::env::args_os().skip(1))
.current_dir(cwd)

@@ -117,6 +133,39 @@ async fn run() -> anyhow::Result<()> {
std::process::exit(status.code().unwrap());
}

let indicatif_layer = IndicatifLayer::new().with_filter(IndicatifFilter::new(false));

let tracing_env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy()
.add_directive("reqwest=info".parse().unwrap())
.add_directive("rustls=info".parse().unwrap())
.add_directive("tokio_util=info".parse().unwrap())
.add_directive("goblin=info".parse().unwrap())
.add_directive("tower=info".parse().unwrap())
.add_directive("hyper=info".parse().unwrap())
.add_directive("h2=info".parse().unwrap());

let fmt_layer =
tracing_subscriber::fmt::layer().with_writer(indicatif_layer.inner().get_stderr_writer());

#[cfg(debug_assertions)]
let fmt_layer = fmt_layer.with_timer(tracing_subscriber::fmt::time::uptime());

#[cfg(not(debug_assertions))]
let fmt_layer = fmt_layer
.pretty()
.with_timer(())
.with_line_number(false)
.with_file(false)
.with_target(false);

tracing_subscriber::registry()
.with(tracing_env_filter)
.with(fmt_layer)
.with(indicatif_layer)
.init();

let (project_root_dir, project_workspace_dir) = 'finder: {
let mut current_path = Some(cwd.clone());
let mut project_root = None::<PathBuf>;

@@ -133,17 +182,14 @@ async fn run() -> anyhow::Result<()> {
return Ok(HashSet::new());
}

manifest
.workspace_members
.iter()
.map(|member| path.join(member))
.map(|p| glob::glob(&p.to_string_lossy()))
.collect::<Result<Vec<_>, _>>()
.context("invalid glob patterns")?
.into_iter()
.flat_map(|paths| paths.into_iter())
.collect::<Result<HashSet<_>, _>>()
.context("failed to expand glob patterns")
matching_globs(
path,
manifest.workspace_members.iter().map(|s| s.as_str()),
false,
false,
)
.await
.context("failed to get workspace members")
}

while let Some(path) = current_path {

@@ -182,16 +228,13 @@ async fn run() -> anyhow::Result<()> {
(project_root.unwrap_or_else(|| cwd.clone()), workspace_dir)
};

let multi = {
let logger = pretty_env_logger::formatted_builder()
.parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"))
.build();
let multi = MultiProgress::new();

LogWrapper::new(multi.clone(), logger).try_init().unwrap();

multi
};
tracing::trace!(
"project root: {}\nworkspace root: {}",
project_root_dir.display(),
project_workspace_dir
.as_ref()
.map_or("none".to_string(), |p| p.display().to_string())
);

let home_dir = home_dir()?;
let data_dir = home_dir.join("data");

@@ -208,7 +251,7 @@ async fn run() -> anyhow::Result<()> {
}
.join("cas");

log::debug!("using cas dir in {}", cas_dir.display());
tracing::debug!("using cas dir in {}", cas_dir.display());

let project = Project::new(
project_root_dir,

@@ -247,7 +290,7 @@ async fn run() -> anyhow::Result<()> {
.and_then(|manifest| manifest.pesde_version);

let exe_path = if let Some(version) = target_version {
get_or_download_version(&reqwest, &version, false).await?
get_or_download_version(&reqwest, &TagInfo::Incomplete(version), false).await?
} else {
None
};

@@ -269,7 +312,7 @@ async fn run() -> anyhow::Result<()> {

let cli = Cli::parse();

cli.subcommand.run(project, multi, reqwest).await
cli.subcommand.run(project, reqwest).await
}

#[tokio::main]

@@ -1,14 +1,13 @@
use std::collections::{BTreeMap, BTreeSet};

use relative_path::RelativePathBuf;
use semver::Version;
use serde::{Deserialize, Serialize};

use crate::{
manifest::{overrides::OverrideKey, target::Target},
names::PackageName,
source::specifiers::DependencySpecifiers,
};
use relative_path::RelativePathBuf;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap};
use tracing::instrument;

/// Overrides
pub mod overrides;

@@ -45,7 +44,7 @@ pub struct Manifest {
/// The indices to use for the package
#[serde(
default,
serialize_with = "crate::util::serialize_gix_url_map",
skip_serializing,
deserialize_with = "crate::util::deserialize_gix_url_map"
)]
pub indices: BTreeMap<String, gix::Url>,

@@ -53,8 +52,7 @@ pub struct Manifest {
#[cfg(feature = "wally-compat")]
#[serde(
default,
skip_serializing_if = "BTreeMap::is_empty",
serialize_with = "crate::util::serialize_gix_url_map",
skip_serializing,
deserialize_with = "crate::util::deserialize_gix_url_map"
)]
pub wally_indices: BTreeMap<String, gix::Url>,

@@ -63,7 +61,7 @@ pub struct Manifest {
pub overrides: BTreeMap<OverrideKey, DependencySpecifiers>,
/// The files to include in the package
#[serde(default)]
pub includes: BTreeSet<String>,
pub includes: Vec<String>,
/// The patches to apply to packages
#[cfg(feature = "patches")]
#[serde(default, skip_serializing)]

@@ -90,6 +88,9 @@ pub struct Manifest {
/// The dev dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dev_dependencies: BTreeMap<String, DependencySpecifiers>,
/// The user-defined fields of the package
#[serde(flatten)]
pub user_defined_fields: HashMap<String, toml::Value>,
}

/// A dependency type

@@ -106,6 +107,7 @@ pub enum DependencyType {

impl Manifest {
/// Get all dependencies from the manifest
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub fn all_dependencies(
&self,
) -> Result<

@@ -2,7 +2,7 @@ use relative_path::RelativePathBuf;
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use std::{
collections::BTreeSet,
collections::{BTreeMap, BTreeSet},
fmt::{Display, Formatter},
str::FromStr,
};

@@ -68,6 +68,11 @@ impl TargetKind {

format!("{dependency}_packages")
}

/// Returns whether this target is a Roblox target
pub fn is_roblox(&self) -> bool {
matches!(self, TargetKind::Roblox | TargetKind::RobloxServer)
}
}

/// A target of a package

@@ -77,7 +82,7 @@ pub enum Target {
/// A Roblox target
Roblox {
/// The path to the lib export file
#[serde(default)]
#[serde(default, skip_serializing_if = "Option::is_none")]
lib: Option<RelativePathBuf>,
/// The files to include in the sync tool's config
#[serde(default)]

@@ -86,7 +91,7 @@ pub enum Target {
/// A Roblox server target
RobloxServer {
/// The path to the lib export file
#[serde(default)]
#[serde(default, skip_serializing_if = "Option::is_none")]
lib: Option<RelativePathBuf>,
/// The files to include in the sync tool's config
#[serde(default)]

@@ -95,20 +100,26 @@ pub enum Target {
/// A Lune target
Lune {
/// The path to the lib export file
#[serde(default)]
#[serde(default, skip_serializing_if = "Option::is_none")]
lib: Option<RelativePathBuf>,
/// The path to the bin export file
#[serde(default)]
#[serde(default, skip_serializing_if = "Option::is_none")]
bin: Option<RelativePathBuf>,
/// The exported scripts
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
scripts: BTreeMap<String, RelativePathBuf>,
},
/// A Luau target
Luau {
/// The path to the lib export file
#[serde(default)]
#[serde(default, skip_serializing_if = "Option::is_none")]
lib: Option<RelativePathBuf>,
/// The path to the bin export file
#[serde(default)]
#[serde(default, skip_serializing_if = "Option::is_none")]
bin: Option<RelativePathBuf>,
/// The exported scripts
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
scripts: BTreeMap<String, RelativePathBuf>,
},
}

@@ -151,6 +162,15 @@ impl Target {
_ => None,
}
}

/// Returns the scripts exported by this target
pub fn scripts(&self) -> Option<&BTreeMap<String, RelativePathBuf>> {
match self {
Target::Lune { scripts, .. } => Some(scripts),
Target::Luau { scripts, .. } => Some(scripts),
_ => None,
}
}
}
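
A short hedged sketch of how a consumer can read the new `scripts` export off a target (mirroring, but not copied from, the linking code above; `target` here is a hypothetical `Target` value):

// Hypothetical: iterate a target's exported scripts, skipping empty maps.
if let Some(scripts) = target.scripts().filter(|s| !s.is_empty()) {
    for (script_name, script_path) in scripts {
        println!("{script_name} -> {script_path}");
    }
}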

impl Display for Target {

22 src/names.rs

@@ -35,8 +35,16 @@ impl FromStr for PackageName {
.ok_or(Self::Err::InvalidFormat(s.to_string()))?;

for (reason, part) in [(ErrorReason::Scope, scope), (ErrorReason::Name, name)] {
if part.len() < 3 || part.len() > 32 {
return Err(Self::Err::InvalidLength(reason, part.to_string()));
let min_len = match reason {
ErrorReason::Scope => 3,
ErrorReason::Name => 1,
};

if !(min_len..=32).contains(&part.len()) {
return Err(match reason {
ErrorReason::Scope => Self::Err::InvalidScopeLength(part.to_string()),
ErrorReason::Name => Self::Err::InvalidNameLength(part.to_string()),
});
}

if part.chars().all(|c| c.is_ascii_digit()) {

@@ -231,9 +239,13 @@ pub mod errors {
#[error("package {0} `{1}` starts or ends with an underscore")]
PrePostfixUnderscore(ErrorReason, String),

/// The package name is not within 3-32 characters long
#[error("package {0} `{1}` is not within 3-32 characters long")]
InvalidLength(ErrorReason, String),
/// The package name's scope part is not within 3-32 characters long
#[error("package scope `{0}` is not within 3-32 characters long")]
InvalidScopeLength(String),

/// The package name's name part is not within 1-32 characters long
#[error("package name `{0}` is not within 1-32 characters long")]
InvalidNameLength(String),
}
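
A hedged illustration of the relaxed rules (the names below are hypothetical): scopes still need 3-32 characters, while the name part may now be as short as one character.

// Hypothetical examples of the new validation:
assert!("acme/x".parse::<PackageName>().is_ok()); // 1-char name now allowed
assert!("ab/tooling".parse::<PackageName>().is_err()); // 2-char scope still rejected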

/// Errors that can occur when working with Wally package names

@@ -3,6 +3,7 @@ use fs_err::tokio as fs;
use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature};
use relative_path::RelativePathBuf;
use std::path::Path;
use tracing::instrument;

/// Set up a git repository for patches
pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> {

@@ -69,6 +70,7 @@ pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {

impl Project {
/// Apply patches to the project's dependencies
#[instrument(skip(self, graph), level = "debug")]
pub async fn apply_patches(
&self,
graph: &DownloadedGraph,

@@ -97,7 +99,7 @@ impl Project {
.get(&name)
.and_then(|versions| versions.get(&version_id))
else {
log::warn!(
tracing::warn!(
"patch for {name}@{version_id} not applied because it is not in the graph"
);
tx.send(Ok(format!("{name}@{version_id}"))).await.unwrap();

@@ -114,7 +116,7 @@ impl Project {
);

tokio::spawn(async move {
log::debug!("applying patch to {name}@{version_id}");
tracing::debug!("applying patch to {name}@{version_id}");

let patch = match fs::read(&patch_path).await {
Ok(patch) => patch,

@@ -195,7 +197,9 @@ impl Project {
}
}

log::debug!("patch applied to {name}@{version_id}, removing .git directory");
tracing::debug!(
"patch applied to {name}@{version_id}, removing .git directory"
);

if let Err(e) = fs::remove_dir_all(container_folder.join(".git")).await {
tx.send(Err(errors::ApplyPatchesError::DotGitRemove(e)))

514 src/resolver.rs

@@ -1,5 +1,5 @@
use crate::{
lockfile::{insert_node, DependencyGraph, DependencyGraphNode},
lockfile::{DependencyGraph, DependencyGraphNode},
manifest::DependencyType,
names::PackageNames,
source::{

@@ -11,14 +11,61 @@ use crate::{
},
Project, DEFAULT_INDEX_NAME,
};
use std::collections::{HashMap, HashSet, VecDeque};
use std::collections::{btree_map::Entry, HashMap, HashSet, VecDeque};
use tracing::{instrument, Instrument};

fn insert_node(
graph: &mut DependencyGraph,
name: PackageNames,
version: VersionId,
mut node: DependencyGraphNode,
is_top_level: bool,
) {
if !is_top_level && node.direct.take().is_some() {
tracing::debug!(
"tried to insert {name}@{version} as direct dependency from a non top-level context",
);
}

match graph
.entry(name.clone())
.or_default()
.entry(version.clone())
{
Entry::Vacant(entry) => {
entry.insert(node);
}
Entry::Occupied(existing) => {
let current_node = existing.into_mut();

match (&current_node.direct, &node.direct) {
(Some(_), Some(_)) => {
tracing::warn!("duplicate direct dependency for {name}@{version}");
}

(None, Some(_)) => {
current_node.direct = node.direct;
}

(_, _) => {}
}
}
}
}
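
The merge semantics here: inserting the same name/version twice keeps the existing node, except that a later top-level insert backfills the `direct` metadata onto a node that was previously only reachable transitively. A hedged sketch (the node values below are hypothetical):

// Hypothetical: the same package is first seen transitively, then directly.
let mut graph = DependencyGraph::new();
insert_node(&mut graph, name.clone(), version.clone(), transitive_node, false);
// The top-level insert fills in the missing `direct` (alias, specifier, type).
insert_node(&mut graph, name, version, direct_node, true);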

impl Project {
/// Create a dependency graph from the project's manifest
#[instrument(
skip(self, previous_graph, refreshed_sources),
ret(level = "trace"),
level = "debug"
)]
pub async fn dependency_graph(
&self,
previous_graph: Option<&DependencyGraph>,
refreshed_sources: &mut HashSet<PackageSources>,
// used by `x` command - if true, specifier indices are expected to be URLs. will not do peer dependency checks
is_published_package: bool,
) -> Result<DependencyGraph, Box<errors::DependencyGraphError>> {
let manifest = self
.deser_manifest()

@@ -37,7 +84,7 @@ impl Project {
if let Some(previous_graph) = previous_graph {
for (name, versions) in previous_graph {
for (version, node) in versions {
let Some((_, specifier)) = &node.direct else {
let Some((old_alias, specifier, source_ty)) = &node.direct else {
// this is not a direct dependency, will be added if it's still being used later
continue;
};

@@ -47,20 +94,24 @@ impl Project {
continue;
}

let Some(alias) = all_specifiers.remove(&(specifier.clone(), node.ty)) else {
log::debug!(
"dependency {name}@{version} from old dependency graph is no longer in the manifest",
let Some(alias) = all_specifiers.remove(&(specifier.clone(), *source_ty))
else {
tracing::debug!(
"dependency {name}@{version} (old alias {old_alias}) from old dependency graph is no longer in the manifest",
);
continue;
};

log::debug!("resolved {}@{} from old dependency graph", name, version);
let span = tracing::info_span!("resolve from old graph", alias);
let _guard = span.enter();

tracing::debug!("resolved {}@{} from old dependency graph", name, version);
insert_node(
&mut graph,
name.clone(),
version.clone(),
DependencyGraphNode {
direct: Some((alias.clone(), specifier.clone())),
direct: Some((alias.clone(), specifier.clone(), *source_ty)),
..node.clone()
},
true,

@@ -69,22 +120,24 @@ impl Project {
let mut queue = node
.dependencies
.iter()
.map(|(name, (version, _))| (name, version, 0usize))
.map(|(name, (version, dep_alias))| {
(
name,
version,
vec![alias.to_string(), dep_alias.to_string()],
)
})
.collect::<VecDeque<_>>();

while let Some((dep_name, dep_version, depth)) = queue.pop_front() {
while let Some((dep_name, dep_version, path)) = queue.pop_front() {
let inner_span =
tracing::info_span!("resolve dependency", path = path.join(">"));
let _inner_guard = inner_span.enter();
if let Some(dep_node) = previous_graph
.get(dep_name)
.and_then(|v| v.get(dep_version))
{
log::debug!(
"{}resolved dependency {}@{} from {}@{}",
"\t".repeat(depth),
dep_name,
dep_version,
name,
version
);
tracing::debug!("resolved sub-dependency {dep_name}@{dep_version}");
insert_node(
&mut graph,
dep_name.clone(),

@@ -96,15 +149,20 @@ impl Project {
dep_node
.dependencies
.iter()
.map(|(name, (version, _))| (name, version, depth + 1))
.map(|(name, (version, alias))| {
(
name,
version,
path.iter()
.cloned()
.chain(std::iter::once(alias.to_string()))
.collect(),
)
})
.for_each(|dep| queue.push_back(dep));
} else {
log::warn!(
"dependency {}@{} from {}@{} not found in previous graph",
dep_name,
dep_version,
name,
version
tracing::warn!(
"dependency {dep_name}@{dep_version} not found in previous graph"
);
}
}

@@ -116,7 +174,6 @@ impl Project {
.into_iter()
.map(|((spec, ty), alias)| {
(
alias.to_string(),
spec,
ty,
None::<(PackageNames, VersionId)>,

@@ -127,216 +184,233 @@ impl Project {
})
.collect::<VecDeque<_>>();

while let Some((alias, specifier, ty, dependant, path, overridden, target)) =
queue.pop_front()
{
let depth = path.len() - 1;
while let Some((specifier, ty, dependant, path, overridden, target)) = queue.pop_front() {
async {
let alias = path.last().unwrap().clone();
let depth = path.len() - 1;

log::debug!(
"{}resolving {specifier} ({alias}) from {dependant:?}",
"\t".repeat(depth)
);
let source = match &specifier {
DependencySpecifiers::Pesde(specifier) => {
let index_url = if depth == 0 || overridden {
let index_name = specifier.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
tracing::debug!("resolving {specifier} ({ty:?})");
let source = match &specifier {
DependencySpecifiers::Pesde(specifier) => {
let index_url = if !is_published_package && (depth == 0 || overridden) {
let index_name = specifier.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);

manifest
.indices
.get(index_name)
.ok_or(errors::DependencyGraphError::IndexNotFound(
index_name.to_string(),
))?
.clone()
manifest
.indices
.get(index_name)
.ok_or(errors::DependencyGraphError::IndexNotFound(
index_name.to_string(),
))?
.clone()
} else {
let index_url = specifier.index.clone().unwrap();

index_url
.clone()
.try_into()
// specifiers in indices store the index url in this field
.unwrap()
};

PackageSources::Pesde(PesdePackageSource::new(index_url))
}
#[cfg(feature = "wally-compat")]
DependencySpecifiers::Wally(specifier) => {
let index_url = if !is_published_package && (depth == 0 || overridden) {
let index_name = specifier.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);

manifest
.wally_indices
.get(index_name)
.ok_or(errors::DependencyGraphError::WallyIndexNotFound(
index_name.to_string(),
))?
.clone()
} else {
let index_url = specifier.index.clone().unwrap();

index_url
.clone()
.try_into()
// specifiers in indices store the index url in this field
.unwrap()
};

PackageSources::Wally(crate::source::wally::WallyPackageSource::new(index_url))
}
DependencySpecifiers::Git(specifier) => PackageSources::Git(
crate::source::git::GitPackageSource::new(specifier.repo.clone()),
),
DependencySpecifiers::Workspace(_) => {
PackageSources::Workspace(crate::source::workspace::WorkspacePackageSource)
}
};

if refreshed_sources.insert(source.clone()) {
source.refresh(self).await.map_err(|e| Box::new(e.into()))?;
}

let (name, resolved) = source
.resolve(&specifier, self, target, refreshed_sources)
.await
.map_err(|e| Box::new(e.into()))?;

let Some(target_version_id) = graph
.get(&name)
.and_then(|versions| {
versions
.keys()
// only consider versions that are compatible with the specifier
.filter(|ver| resolved.contains_key(ver))
.max()
})
.or_else(|| resolved.last_key_value().map(|(ver, _)| ver))
.cloned()
else {
return Err(Box::new(errors::DependencyGraphError::NoMatchingVersion(
format!("{specifier} ({target})"),
)));
};

let resolved_ty = if (is_published_package || depth == 0) && ty == DependencyType::Peer
{
DependencyType::Standard
} else {
ty
};

if let Some((dependant_name, dependant_version_id)) = dependant {
graph
.get_mut(&dependant_name)
.and_then(|versions| versions.get_mut(&dependant_version_id))
.and_then(|node| {
node.dependencies
.insert(name.clone(), (target_version_id.clone(), alias.clone()))
});
}

let pkg_ref = &resolved[&target_version_id];

if let Some(already_resolved) = graph
.get_mut(&name)
.and_then(|versions| versions.get_mut(&target_version_id))
{
tracing::debug!(
"{}@{} already resolved",
name,
target_version_id
);

if std::mem::discriminant(&already_resolved.pkg_ref)
!= std::mem::discriminant(pkg_ref)
{
tracing::warn!(
"resolved package {name}@{target_version_id} has a different source than previously resolved one, this may cause issues",
);
}

if already_resolved.resolved_ty == DependencyType::Peer {
already_resolved.resolved_ty = resolved_ty;
}

if ty == DependencyType::Peer && depth == 0 {
already_resolved.is_peer = true;
}

if already_resolved.direct.is_none() && depth == 0 {
already_resolved.direct = Some((alias.clone(), specifier.clone(), ty));
}

return Ok(());
}

let node = DependencyGraphNode {
direct: if depth == 0 {
Some((alias.clone(), specifier.clone(), ty))
} else {
let index_url = specifier.index.clone().unwrap();

index_url
.clone()
.try_into()
// specifiers in indices store the index url in this field
.unwrap()
};

PackageSources::Pesde(PesdePackageSource::new(index_url))
}
#[cfg(feature = "wally-compat")]
DependencySpecifiers::Wally(specifier) => {
let index_url = if depth == 0 || overridden {
let index_name = specifier.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);

manifest
.wally_indices
.get(index_name)
.ok_or(errors::DependencyGraphError::WallyIndexNotFound(
index_name.to_string(),
))?
.clone()
None
},
pkg_ref: pkg_ref.clone(),
dependencies: Default::default(),
resolved_ty,
is_peer: if depth == 0 {
false
} else {
let index_url = specifier.index.clone().unwrap();
ty == DependencyType::Peer
},
};
insert_node(
&mut graph,
name.clone(),
target_version_id.clone(),
node.clone(),
depth == 0,
);

index_url
.clone()
.try_into()
// specifiers in indices store the index url in this field
.unwrap()
};

PackageSources::Wally(crate::source::wally::WallyPackageSource::new(index_url))
}
DependencySpecifiers::Git(specifier) => PackageSources::Git(
crate::source::git::GitPackageSource::new(specifier.repo.clone()),
),
DependencySpecifiers::Workspace(_) => {
PackageSources::Workspace(crate::source::workspace::WorkspacePackageSource)
}
};

if refreshed_sources.insert(source.clone()) {
source.refresh(self).await.map_err(|e| Box::new(e.into()))?;
}

let (name, resolved) = source
.resolve(&specifier, self, target)
.await
.map_err(|e| Box::new(e.into()))?;

let Some(target_version_id) = graph
.get(&name)
.and_then(|versions| {
versions
.keys()
// only consider versions that are compatible with the specifier
.filter(|ver| resolved.contains_key(ver))
.max()
})
.or_else(|| resolved.last_key_value().map(|(ver, _)| ver))
.cloned()
else {
return Err(Box::new(errors::DependencyGraphError::NoMatchingVersion(
format!("{specifier} ({target})"),
)));
};

let ty = if depth == 0 && ty == DependencyType::Peer {
DependencyType::Standard
} else {
ty
};

if let Some((dependant_name, dependant_version_id)) = dependant {
graph
.get_mut(&dependant_name)
.and_then(|versions| versions.get_mut(&dependant_version_id))
.and_then(|node| {
node.dependencies
.insert(name.clone(), (target_version_id.clone(), alias.clone()))
});
}

let pkg_ref = &resolved[&target_version_id];

if let Some(already_resolved) = graph
.get_mut(&name)
.and_then(|versions| versions.get_mut(&target_version_id))
{
|
||||
log::debug!(
|
||||
"{}{}@{} already resolved",
|
||||
"\t".repeat(depth),
|
||||
tracing::debug!(
|
||||
"resolved {}@{} from new dependency graph",
|
||||
name,
|
||||
target_version_id
|
||||
);
|
||||
|
||||
if std::mem::discriminant(&already_resolved.pkg_ref)
|
||||
!= std::mem::discriminant(pkg_ref)
|
||||
for (dependency_alias, (dependency_spec, dependency_ty)) in
|
||||
pkg_ref.dependencies().clone()
|
||||
{
|
||||
log::warn!(
|
||||
"resolved package {name}@{target_version_id} has a different source than the previously resolved one, this may cause issues",
|
||||
);
|
||||
if dependency_ty == DependencyType::Dev {
|
||||
// dev dependencies of dependencies are to be ignored
|
||||
continue;
|
||||
}
|
||||
|
||||
let overridden = manifest.overrides.iter().find_map(|(key, spec)| {
|
||||
key.0.iter().find_map(|override_path| {
|
||||
// if the path up until the last element is the same as the current path,
|
||||
// and the last element in the path is the dependency alias,
|
||||
// then the specifier is to be overridden
|
||||
(path.len() == override_path.len() - 1
|
||||
&& path == override_path[..override_path.len() - 1]
|
||||
&& override_path.last() == Some(&dependency_alias))
|
||||
.then_some(spec)
|
||||
})
|
||||
});
|
||||
|
||||
if overridden.is_some() {
|
||||
tracing::debug!(
|
||||
"overridden specifier found for {} ({dependency_spec})",
|
||||
path.iter()
|
||||
.map(|s| s.as_str())
|
||||
.chain(std::iter::once(dependency_alias.as_str()))
|
||||
.collect::<Vec<_>>()
|
||||
.join(">"),
|
||||
);
|
||||
}
|
||||
|
||||
queue.push_back((
|
||||
overridden.cloned().unwrap_or(dependency_spec),
|
||||
dependency_ty,
|
||||
Some((name.clone(), target_version_id.clone())),
|
||||
path.iter()
|
||||
.cloned()
|
||||
.chain(std::iter::once(dependency_alias))
|
||||
.collect(),
|
||||
overridden.is_some(),
|
||||
*target_version_id.target(),
|
||||
));
|
||||
}
|
||||
|
||||
if already_resolved.ty == DependencyType::Peer && ty == DependencyType::Standard {
|
||||
already_resolved.ty = ty;
|
||||
}
|
||||
|
||||
if already_resolved.direct.is_none() && depth == 0 {
|
||||
already_resolved.direct = Some((alias.clone(), specifier.clone()));
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
let node = DependencyGraphNode {
|
||||
direct: if depth == 0 {
|
||||
Some((alias.clone(), specifier.clone()))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
pkg_ref: pkg_ref.clone(),
|
||||
dependencies: Default::default(),
|
||||
ty,
|
||||
};
|
||||
insert_node(
|
||||
&mut graph,
|
||||
name.clone(),
|
||||
target_version_id.clone(),
|
||||
node.clone(),
|
||||
depth == 0,
|
||||
);
|
||||
|
||||
log::debug!(
|
||||
"{}resolved {}@{} from new dependency graph",
|
||||
"\t".repeat(depth),
|
||||
name,
|
||||
target_version_id
|
||||
);
|
||||
|
||||
for (dependency_alias, (dependency_spec, dependency_ty)) in
|
||||
pkg_ref.dependencies().clone()
|
||||
{
|
||||
if dependency_ty == DependencyType::Dev {
|
||||
// dev dependencies of dependencies are to be ignored
|
||||
continue;
|
||||
}
|
||||
|
||||
let overridden = manifest.overrides.iter().find_map(|(key, spec)| {
|
||||
key.0.iter().find_map(|override_path| {
|
||||
// if the path up until the last element is the same as the current path,
|
||||
// and the last element in the path is the dependency alias,
|
||||
// then the specifier is to be overridden
|
||||
(path.len() == override_path.len() - 1
|
||||
&& path == override_path[..override_path.len() - 1]
|
||||
&& override_path.last() == Some(&dependency_alias))
|
||||
.then_some(spec)
|
||||
})
|
||||
});
|
||||
|
||||
if overridden.is_some() {
|
||||
log::debug!(
|
||||
"{}overridden specifier found for {dependency_alias} ({dependency_spec})",
|
||||
"\t".repeat(depth)
|
||||
);
|
||||
}
|
||||
|
||||
queue.push_back((
|
||||
dependency_alias,
|
||||
overridden.cloned().unwrap_or(dependency_spec),
|
||||
dependency_ty,
|
||||
Some((name.clone(), target_version_id.clone())),
|
||||
path.iter()
|
||||
.cloned()
|
||||
.chain(std::iter::once(alias.to_string()))
|
||||
.collect(),
|
||||
overridden.is_some(),
|
||||
*target_version_id.target(),
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
.instrument(tracing::info_span!("resolve new/changed", path = path.join(">")))
|
||||
.await?;
|
||||
}
|
||||
|
||||
for (name, versions) in &graph {
|
||||
for (name, versions) in &mut graph {
|
||||
for (version_id, node) in versions {
|
||||
if node.ty == DependencyType::Peer {
|
||||
log::warn!("peer dependency {name}@{version_id} was not resolved");
|
||||
if node.is_peer && node.direct.is_none() {
|
||||
node.resolved_ty = DependencyType::Peer;
|
||||
}
|
||||
|
||||
if node.resolved_ty == DependencyType::Peer {
|
||||
tracing::warn!("peer dependency {name}@{version_id} was not resolved");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
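The override lookup in the hunk above boils down to a small predicate over dependency paths: an override applies when everything up to its last element equals the current path and the last element is the dependency's alias. A standalone sketch of that rule, where matches_override is a hypothetical helper rather than a function from this diff:

fn matches_override(path: &[String], alias: &str, override_path: &[String]) -> bool {
    // guard against an empty override path before the len() - 1 below
    !override_path.is_empty()
        && path.len() == override_path.len() - 1
        && path == &override_path[..override_path.len() - 1]
        && override_path.last().map(String::as_str) == Some(alias)
}

fn main() {
    let path = vec!["a".to_string(), "b".to_string()];
    // overrides `c` as pulled in via `a` > `b`
    assert!(matches_override(
        &path,
        "c",
        &["a".to_string(), "b".to_string(), "c".to_string()]
    ));
    // does not apply one level up
    assert!(!matches_override(&path, "b", &["a".to_string(), "b".to_string()]));
}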
@@ -1,12 +1,15 @@
 use crate::Project;
 use std::{
 	ffi::OsStr,
-	fmt::{Display, Formatter},
-	io::{BufRead, BufReader},
+	fmt::{Debug, Display, Formatter},
 	path::Path,
-	process::{Command, Stdio},
-	thread::spawn,
+	process::Stdio,
 };
+use tokio::{
+	io::{AsyncBufReadExt, BufReader},
+	process::Command,
+};
+use tracing::instrument;

 /// Script names used by pesde
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]

@@ -28,7 +31,8 @@ impl Display for ScriptName {
 	}
 }

-pub(crate) fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(
+#[instrument(skip(project), level = "debug")]
+pub(crate) async fn execute_script<A: IntoIterator<Item = S> + Debug, S: AsRef<OsStr> + Debug>(
 	script_name: ScriptName,
 	script_path: &Path,
 	args: A,

@@ -47,20 +51,20 @@ pub(crate) fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(
 		.spawn()
 	{
 		Ok(mut child) => {
-			let stdout = BufReader::new(child.stdout.take().unwrap());
-			let stderr = BufReader::new(child.stderr.take().unwrap());
+			let mut stdout = BufReader::new(child.stdout.take().unwrap()).lines();
+			let mut stderr = BufReader::new(child.stderr.take().unwrap()).lines();

 			let script = script_name.to_string();
 			let script_2 = script.to_string();

-			spawn(move || {
-				for line in stderr.lines() {
+			tokio::spawn(async move {
+				while let Some(line) = stderr.next_line().await.transpose() {
 					match line {
 						Ok(line) => {
-							log::error!("[{script}]: {line}");
+							tracing::error!("[{script}]: {line}");
 						}
 						Err(e) => {
-							log::error!("ERROR IN READING STDERR OF {script}: {e}");
+							tracing::error!("ERROR IN READING STDERR OF {script}: {e}");
 							break;
 						}
 					}

@@ -69,18 +73,18 @@ pub(crate) fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(

 			let mut stdout_str = String::new();

-			for line in stdout.lines() {
+			while let Some(line) = stdout.next_line().await.transpose() {
 				match line {
 					Ok(line) => {
 						if return_stdout {
 							stdout_str.push_str(&line);
 							stdout_str.push('\n');
 						} else {
-							log::info!("[{script_2}]: {line}");
+							tracing::info!("[{script_2}]: {line}");
 						}
 					}
 					Err(e) => {
-						log::error!("ERROR IN READING STDOUT OF {script_2}: {e}");
+						tracing::error!("ERROR IN READING STDOUT OF {script_2}: {e}");
 						break;
 					}
 				}

@@ -93,7 +97,7 @@ pub(crate) fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(
 			}
 		}
 		Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
-			log::warn!("Lune could not be found in PATH: {e}");
+			tracing::warn!("Lune could not be found in PATH: {e}");

 			Ok(None)
 		}
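The rewritten reader loops above lean on Result::transpose: tokio's next_line() returns io::Result<Option<String>>, and transposing that into Option<io::Result<String>> is what lets a `while let Some(line)` loop drive it and stop at EOF. A minimal sketch of just that conversion (illustrative, not code from this diff):

use std::io;

// next_line() yields Ok(None) at end of stream; transpose() turns that
// into None, which ends the `while let Some(line) = ...` loop above.
fn as_loop_item(read: io::Result<Option<String>>) -> Option<io::Result<String>> {
    read.transpose()
}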
@@ -8,7 +8,8 @@ use relative_path::RelativePathBuf;
 use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};
 use std::{
-	collections::{BTreeMap, VecDeque},
+	collections::BTreeMap,
+	fmt::Debug,
 	future::Future,
 	path::{Path, PathBuf},
 };

@@ -17,6 +18,7 @@ use tokio::{
 	io::{AsyncReadExt, AsyncWriteExt},
 	pin,
 };
+use tracing::instrument;

 /// A file system entry
 #[derive(Debug, Clone, Serialize, Deserialize)]

@@ -125,7 +127,8 @@ pub(crate) async fn store_in_cas<

 impl PackageFS {
 	/// Write the package to the given destination
-	pub async fn write_to<P: AsRef<Path>, Q: AsRef<Path>>(
+	#[instrument(skip(self), level = "debug")]
+	pub async fn write_to<P: AsRef<Path> + Debug, Q: AsRef<Path> + Debug>(
 		&self,
 		destination: P,
 		cas_path: Q,

@@ -169,44 +172,40 @@ impl PackageFS {
 			PackageFS::Copy(src, target) => {
 				fs::create_dir_all(destination.as_ref()).await?;

-				let mut read_dirs = VecDeque::from([fs::read_dir(src.to_path_buf())]);
-				while let Some(read_dir) = read_dirs.pop_front() {
-					let mut read_dir = read_dir.await?;
-					'entry: while let Some(entry) = read_dir.next_entry().await? {
-						let relative_path =
-							RelativePathBuf::from_path(entry.path().strip_prefix(src).unwrap())
-								.unwrap();
-						let file_name = relative_path.file_name().unwrap();
+				let mut read_dir = fs::read_dir(src).await?;
+				'entry: while let Some(entry) = read_dir.next_entry().await? {
+					let relative_path =
+						RelativePathBuf::from_path(entry.path().strip_prefix(src).unwrap())
+							.unwrap();
+					let dest_path = relative_path.to_path(destination.as_ref());
+					let file_name = relative_path.file_name().unwrap();

-						if entry.file_type().await?.is_dir() {
-							if IGNORED_DIRS.contains(&file_name) {
-								continue;
-							}
+					if entry.file_type().await?.is_dir() {
+						if IGNORED_DIRS.contains(&file_name) {
+							continue;
+						}

-							for other_target in TargetKind::VARIANTS {
-								if target.packages_folder(other_target) == file_name {
-									continue 'entry;
-								}
-							}
+						for other_target in TargetKind::VARIANTS {
+							if target.packages_folder(other_target) == file_name {
+								continue 'entry;
+							}
+						}

-							fs::create_dir_all(relative_path.to_path(destination.as_ref())).await?;
-
-							read_dirs.push_back(fs::read_dir(entry.path()));
-							continue;
-						}
+						#[cfg(windows)]
+						fs::symlink_dir(entry.path(), dest_path).await?;
+						#[cfg(unix)]
+						fs::symlink(entry.path(), dest_path).await?;
+						continue;
+					}

-						if IGNORED_FILES.contains(&file_name) {
-							continue;
-						}
+					if IGNORED_FILES.contains(&file_name) {
+						continue;
+					}

-						let path = relative_path.to_path(destination.as_ref());
-
-						if let Some(parent) = path.parent() {
-							fs::create_dir_all(parent).await?;
-						}
-
-						fs::copy(entry.path(), path).await?;
-					}
+					#[cfg(windows)]
+					fs::symlink_file(entry.path(), dest_path).await?;
+					#[cfg(unix)]
+					fs::symlink(entry.path(), dest_path).await?;
 				}
 			}

@@ -215,7 +214,8 @@ impl PackageFS {
 	}

 	/// Returns the contents of the file with the given hash
-	pub async fn read_file<P: AsRef<Path>, H: AsRef<str>>(
+	#[instrument(skip(self), ret(level = "trace"), level = "debug")]
+	pub async fn read_file<P: AsRef<Path> + Debug, H: AsRef<str> + Debug>(
 		&self,
 		file_hash: H,
 		cas_path: P,
@@ -1,7 +1,3 @@
-use gix::{bstr::BStr, traverse::tree::Recorder, ObjectId, Url};
-use relative_path::RelativePathBuf;
-use std::{collections::BTreeMap, fmt::Debug, hash::Hash, path::PathBuf, sync::Arc};
-
 use crate::{
 	manifest::{
 		target::{Target, TargetKind},

@@ -14,14 +10,24 @@ use crate::{
 		git_index::{read_file, GitBasedSource},
 		specifiers::DependencySpecifiers,
 		traits::PackageRef,
-		PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
+		PackageSource, PackageSources, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
 	},
 	util::hash,
 	Project, DEFAULT_INDEX_NAME, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
 };
 use fs_err::tokio as fs;
 use futures::future::try_join_all;
+use gix::{bstr::BStr, traverse::tree::Recorder, ObjectId, Url};
+use relative_path::RelativePathBuf;
+use std::{
+	collections::{BTreeMap, HashSet},
+	fmt::Debug,
+	hash::Hash,
+	path::PathBuf,
+	sync::Arc,
+};
 use tokio::{sync::Mutex, task::spawn_blocking};
+use tracing::instrument;

 /// The Git package reference
 pub mod pkg_ref;

@@ -65,15 +71,18 @@ impl PackageSource for GitPackageSource {
 	type ResolveError = errors::ResolveError;
 	type DownloadError = errors::DownloadError;

+	#[instrument(skip_all, level = "debug")]
 	async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
 		GitBasedSource::refresh(self, project).await
 	}

+	#[instrument(skip_all, level = "debug")]
 	async fn resolve(
 		&self,
 		specifier: &Self::Specifier,
 		project: &Project,
 		_project_target: TargetKind,
+		_refreshed_sources: &mut HashSet<PackageSources>,
 	) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
 		let repo = gix::open(self.path(project))
 			.map_err(|e| errors::ResolveError::OpenRepo(Box::new(self.repo_url.clone()), e))?;

@@ -323,6 +332,7 @@ impl PackageSource for GitPackageSource {
 		))
 	}

+	#[instrument(skip_all, level = "debug")]
 	async fn download(
 		&self,
 		pkg_ref: &Self::Ref,

@@ -337,7 +347,7 @@ impl PackageSource for GitPackageSource {

 		match fs::read_to_string(&index_file).await {
 			Ok(s) => {
-				log::debug!(
+				tracing::debug!(
 					"using cached index file for package {}#{}",
 					pkg_ref.repo,
 					pkg_ref.tree_id

@@ -481,7 +491,7 @@ impl PackageSource for GitPackageSource {
 		}

 		if pkg_ref.use_new_structure() && name == "default.project.json" {
-			log::debug!(
+			tracing::debug!(
 				"removing default.project.json from {}#{} at {path} - using new structure",
 				pkg_ref.repo,
 				pkg_ref.tree_id
@@ -1,8 +1,11 @@
 #![allow(async_fn_in_trait)]

 use crate::{util::authenticate_conn, Project};
 use fs_err::tokio as fs;
 use gix::remote::Direction;
+use std::fmt::Debug;
 use tokio::task::spawn_blocking;
+use tracing::instrument;

 /// A trait for sources that are based on Git repositories
 pub trait GitBasedSource {

@@ -90,7 +93,11 @@ pub trait GitBasedSource {
 }

 /// Reads a file from a tree
-pub fn read_file<I: IntoIterator<Item = P> + Clone, P: ToString + PartialEq<gix::bstr::BStr>>(
+#[instrument(skip(tree), ret, level = "trace")]
+pub fn read_file<
+	I: IntoIterator<Item = P> + Clone + Debug,
+	P: ToString + PartialEq<gix::bstr::BStr>,
+>(
 	tree: &gix::Tree,
 	file_path: I,
 ) -> Result<Option<String>, errors::ReadFile> {

@@ -120,6 +127,7 @@ pub fn read_file<I: IntoIterator<Item = P> + Clone, P: ToString + PartialEq<gix:
 }

 /// Gets the root tree of a repository
+#[instrument(skip(repo), level = "trace")]
 pub fn root_tree(repo: &gix::Repository) -> Result<gix::Tree, errors::TreeError> {
 	// this is a bare repo, so this is the actual path
 	let path = repo.path().to_path_buf();
@@ -1,5 +1,3 @@
-use std::{collections::BTreeMap, fmt::Debug};
-
 use crate::{
 	manifest::target::{Target, TargetKind},
 	names::PackageNames,

@@ -9,6 +7,10 @@ use crate::{
 	},
 	Project,
 };
+use std::{
+	collections::{BTreeMap, HashSet},
+	fmt::Debug,
+};

 /// Packages' filesystems
 pub mod fs;

@@ -76,11 +78,12 @@ impl PackageSource for PackageSources {
 		&self,
 		specifier: &Self::Specifier,
 		project: &Project,
-		package_target: TargetKind,
+		project_target: TargetKind,
+		refreshed_sources: &mut HashSet<PackageSources>,
 	) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
 		match (self, specifier) {
 			(PackageSources::Pesde(source), DependencySpecifiers::Pesde(specifier)) => source
-				.resolve(specifier, project, package_target)
+				.resolve(specifier, project, project_target, refreshed_sources)
 				.await
 				.map(|(name, results)| {
 					(

@@ -95,7 +98,7 @@ impl PackageSource for PackageSources {

 			#[cfg(feature = "wally-compat")]
 			(PackageSources::Wally(source), DependencySpecifiers::Wally(specifier)) => source
-				.resolve(specifier, project, package_target)
+				.resolve(specifier, project, project_target, refreshed_sources)
 				.await
 				.map(|(name, results)| {
 					(

@@ -109,7 +112,7 @@ impl PackageSource for PackageSources {
 				.map_err(Into::into),

 			(PackageSources::Git(source), DependencySpecifiers::Git(specifier)) => source
-				.resolve(specifier, project, package_target)
+				.resolve(specifier, project, project_target, refreshed_sources)
 				.await
 				.map(|(name, results)| {
 					(

@@ -124,7 +127,7 @@ impl PackageSource for PackageSources {

 			(PackageSources::Workspace(source), DependencySpecifiers::Workspace(specifier)) => {
 				source
-					.resolve(specifier, project, package_target)
+					.resolve(specifier, project, project_target, refreshed_sources)
 					.await
 					.map(|(name, results)| {
 						(
@@ -1,14 +1,13 @@
-use std::{
-	collections::{BTreeMap, BTreeSet},
-	fmt::Debug,
-	hash::Hash,
-	path::PathBuf,
-};
-
 use gix::Url;
 use relative_path::RelativePathBuf;
 use reqwest::header::{ACCEPT, AUTHORIZATION};
 use serde::{Deserialize, Serialize};
+use std::{
+	collections::{BTreeMap, BTreeSet, HashSet},
+	fmt::Debug,
+	hash::Hash,
+	path::PathBuf,
+};

 use pkg_ref::PesdePackageRef;
 use specifier::PesdeDependencySpecifier;

@@ -18,11 +17,12 @@ use crate::{
 		target::{Target, TargetKind},
 		DependencyType,
 	},
-	names::PackageNames,
+	names::{PackageName, PackageNames},
 	source::{
 		fs::{store_in_cas, FSEntry, PackageFS},
 		git_index::{read_file, root_tree, GitBasedSource},
-		DependencySpecifiers, PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
+		DependencySpecifiers, PackageSource, PackageSources, ResolveResult, VersionId,
+		IGNORED_DIRS, IGNORED_FILES,
 	},
 	util::hash,
 	Project,

@@ -30,6 +30,7 @@ use crate::{
 use fs_err::tokio as fs;
 use futures::StreamExt;
 use tokio::task::spawn_blocking;
+use tracing::instrument;

 /// The pesde package reference
 pub mod pkg_ref;

@@ -73,6 +74,7 @@ impl PesdePackageSource {
 	}

 	/// Reads the config file
+	#[instrument(skip_all, ret(level = "trace"), level = "debug")]
 	pub async fn config(&self, project: &Project) -> Result<IndexConfig, errors::ConfigError> {
 		let repo_url = self.repo_url.clone();
 		let path = self.path(project);

@@ -90,14 +92,6 @@ impl PesdePackageSource {
 		.await
 		.unwrap()
 	}
-
-	/// The git2 repository for the index
-	#[cfg(feature = "git2")]
-	pub fn repo_git2(&self, project: &Project) -> Result<git2::Repository, git2::Error> {
-		let path = self.path(project);
-
-		git2::Repository::open_bare(&path)
-	}
 }

 impl PackageSource for PesdePackageSource {

@@ -107,15 +101,18 @@ impl PackageSource for PesdePackageSource {
 	type ResolveError = errors::ResolveError;
 	type DownloadError = errors::DownloadError;

+	#[instrument(skip_all, level = "debug")]
 	async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
 		GitBasedSource::refresh(self, project).await
 	}

+	#[instrument(skip_all, level = "debug")]
 	async fn resolve(
 		&self,
 		specifier: &Self::Specifier,
 		project: &Project,
-		package_target: TargetKind,
+		project_target: TargetKind,
+		_refreshed_sources: &mut HashSet<PackageSources>,
 	) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
 		let (scope, name) = specifier.name.as_str();
 		let repo = gix::open(self.path(project)).map_err(Box::new)?;

@@ -131,10 +128,10 @@ impl PackageSource for PesdePackageSource {
 			}
 		};

-		let entries: IndexFile = toml::from_str(&string)
+		let IndexFile { entries, .. } = toml::from_str(&string)
 			.map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;

-		log::debug!("{} has {} possible entries", specifier.name, entries.len());
+		tracing::debug!("{} has {} possible entries", specifier.name, entries.len());

 		Ok((
 			PackageNames::Pesde(specifier.name.clone()),

@@ -142,7 +139,7 @@ impl PackageSource for PesdePackageSource {
 				.into_iter()
 				.filter(|(VersionId(version, target), _)| {
 					specifier.version.matches(version)
-						&& specifier.target.unwrap_or(package_target) == *target
+						&& specifier.target.unwrap_or(project_target) == *target
 				})
 				.map(|(id, entry)| {
 					let version = id.version().clone();

@@ -162,6 +159,7 @@ impl PackageSource for PesdePackageSource {
 		))
 	}

+	#[instrument(skip_all, level = "debug")]
 	async fn download(
 		&self,
 		pkg_ref: &Self::Ref,

@@ -178,7 +176,7 @@ impl PackageSource for PesdePackageSource {

 		match fs::read_to_string(&index_file).await {
 			Ok(s) => {
-				log::debug!(
+				tracing::debug!(
 					"using cached index file for package {}@{} {}",
 					pkg_ref.name,
 					pkg_ref.version,

@@ -199,7 +197,7 @@ impl PackageSource for PesdePackageSource {
 		let mut request = reqwest.get(&url).header(ACCEPT, "application/octet-stream");

 		if let Some(token) = project.auth_config.tokens().get(&self.repo_url) {
-			log::debug!("using token for {}", self.repo_url);
+			tracing::debug!("using token for {}", self.repo_url);
 			request = request.header(AUTHORIZATION, token);
 		}

@@ -264,9 +262,52 @@ fn default_archive_size() -> usize {
 	4 * 1024 * 1024
 }

+/// The allowed registries for a package
+#[derive(Deserialize, Debug, Clone)]
+#[serde(untagged)]
+pub enum AllowedRegistries {
+	/// All registries are allowed
+	All(bool),
+	/// Only specific registries are allowed
+	#[serde(deserialize_with = "crate::util::deserialize_gix_url_hashset")]
+	Specific(HashSet<Url>),
+}
+
+impl Default for AllowedRegistries {
+	fn default() -> Self {
+		Self::All(false)
+	}
+}
+
+// strips .git suffix to allow for more flexible matching
+fn simplify_url(mut url: Url) -> Url {
+	url.path = url.path.strip_suffix(b".git").unwrap_or(&url.path).into();
+	url
+}
+
+impl AllowedRegistries {
+	fn _is_allowed(&self, url: &Url) -> bool {
+		match self {
+			Self::All(all) => *all,
+			Self::Specific(urls) => urls.contains(url),
+		}
+	}
+
+	/// Whether the given URL is allowed
+	pub fn is_allowed(&self, url: Url) -> bool {
+		self._is_allowed(&simplify_url(url))
+	}
+
+	/// Whether the given URL is allowed, or is the same as the given URL
+	pub fn is_allowed_or_same(&self, this: Url, external: Url) -> bool {
+		let this = simplify_url(this);
+		let external = simplify_url(external);
+		(this == external) || self._is_allowed(&external) || self._is_allowed(&this)
+	}
+}
+
 /// The configuration for the pesde index
 #[derive(Deserialize, Debug, Clone)]
+#[serde(deny_unknown_fields)]
 pub struct IndexConfig {
 	/// The URL of the API
 	pub api: url::Url,

@@ -274,19 +315,22 @@ pub struct IndexConfig {
 	pub download: Option<String>,
 	/// Whether Git is allowed as a source for publishing packages
 	#[serde(default)]
-	pub git_allowed: bool,
+	pub git_allowed: AllowedRegistries,
 	/// Whether other registries are allowed as a source for publishing packages
 	#[serde(default)]
-	pub other_registries_allowed: bool,
+	pub other_registries_allowed: AllowedRegistries,
 	/// Whether Wally is allowed as a source for publishing packages
 	#[serde(default)]
-	pub wally_allowed: bool,
+	pub wally_allowed: AllowedRegistries,
 	/// The OAuth client ID for GitHub
 	#[serde(default)]
 	pub github_oauth_client_id: Option<String>,
 	/// The maximum size of an archive in bytes
 	#[serde(default = "default_archive_size")]
 	pub max_archive_size: usize,
+	/// The packages to display in the CLI for default script implementations
+	#[serde(default)]
+	pub scripts_packages: Vec<PackageName>,
 }

 impl IndexConfig {

@@ -388,8 +432,20 @@ pub struct IndexFileEntry {
 	pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
 }

+/// The package metadata in the index file
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
+pub struct IndexMetadata {}
+
 /// The index file for a package
-pub type IndexFile = BTreeMap<VersionId, IndexFileEntry>;
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
+pub struct IndexFile {
+	/// Any package-wide metadata
+	#[serde(default, skip_serializing_if = "crate::util::is_default")]
+	pub meta: IndexMetadata,
+	/// The entries in the index file
+	#[serde(flatten)]
+	pub entries: BTreeMap<VersionId, IndexFileEntry>,
+}

 /// Errors that can occur when interacting with the pesde package source
 pub mod errors {
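Because AllowedRegistries is #[serde(untagged)], the git_allowed, other_registries_allowed, and wally_allowed fields above accept either a boolean or a list of registry URLs. A sketch of both forms, assuming IndexConfig from the hunk above is in scope; the API and registry URLs are illustrative only:

fn parse_examples() -> Result<(), toml::de::Error> {
    // boolean form: allow (or forbid) every registry
    let _all: IndexConfig = toml::from_str(
        r#"
api = "https://registry.example.com/"
git_allowed = true
"#,
    )?;

    // list form: allow only the named registries (matched with `.git` stripped)
    let _some: IndexConfig = toml::from_str(
        r#"
api = "https://registry.example.com/"
git_allowed = ["https://github.com/pesde-pkg/index"]
"#,
    )?;

    Ok(())
}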
@@ -1,9 +1,4 @@
 #![allow(async_fn_in_trait)]

-use std::{
-	collections::BTreeMap,
-	fmt::{Debug, Display},
-};
-
 use crate::{
 	manifest::{
 		target::{Target, TargetKind},

@@ -12,6 +7,10 @@ use crate::{
 	source::{DependencySpecifiers, PackageFS, PackageSources, ResolveResult},
 	Project,
 };
+use std::{
+	collections::{BTreeMap, HashSet},
+	fmt::{Debug, Display},
+};

 /// A specifier for a dependency
 pub trait DependencySpecifier: Debug + Display {}

@@ -50,6 +49,7 @@ pub trait PackageSource: Debug {
 		specifier: &Self::Specifier,
 		project: &Project,
 		project_target: TargetKind,
+		refreshed_sources: &mut HashSet<PackageSources>,
 	) -> Result<ResolveResult<Self::Ref>, Self::ResolveError>;

 	/// Downloads a package
|
|||
Project, LINK_LIB_NO_FILE_FOUND,
|
||||
};
|
||||
use fs_err::tokio as fs;
|
||||
use tracing::instrument;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
|
@ -19,7 +20,8 @@ struct SourcemapNode {
|
|||
file_paths: Vec<RelativePathBuf>,
|
||||
}
|
||||
|
||||
pub(crate) async fn find_lib_path(
|
||||
#[instrument(skip(project, package_dir), level = "debug")]
|
||||
async fn find_lib_path(
|
||||
project: &Project,
|
||||
package_dir: &Path,
|
||||
) -> Result<Option<RelativePathBuf>, errors::FindLibPathError> {
|
||||
|
@ -29,7 +31,7 @@ pub(crate) async fn find_lib_path(
|
|||
.scripts
|
||||
.get(&ScriptName::SourcemapGenerator.to_string())
|
||||
else {
|
||||
log::warn!("no sourcemap generator script found in manifest");
|
||||
tracing::warn!("no sourcemap generator script found in manifest");
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
|
@ -39,7 +41,8 @@ pub(crate) async fn find_lib_path(
|
|||
[package_dir],
|
||||
project,
|
||||
true,
|
||||
)?;
|
||||
)
|
||||
.await?;
|
||||
|
||||
if let Some(result) = result.filter(|result| !result.is_empty()) {
|
||||
let node: SourcemapNode = serde_json::from_str(&result)?;
|
||||
|
@ -54,6 +57,7 @@ pub(crate) async fn find_lib_path(
|
|||
|
||||
pub(crate) const WALLY_MANIFEST_FILE_NAME: &str = "wally.toml";
|
||||
|
||||
#[instrument(skip(project, tempdir), level = "debug")]
|
||||
pub(crate) async fn get_target(
|
||||
project: &Project,
|
||||
tempdir: &TempDir,
|
||||
|
|
|
@@ -1,13 +1,13 @@
-use std::collections::BTreeMap;
-
-use semver::{Version, VersionReq};
-use serde::{Deserialize, Deserializer};
-
 use crate::{
 	manifest::{errors, DependencyType},
 	names::wally::WallyPackageName,
 	source::{specifiers::DependencySpecifiers, wally::specifier::WallyDependencySpecifier},
 };
+use semver::{Version, VersionReq};
+use serde::{Deserialize, Deserializer};
+use std::collections::BTreeMap;
+use tracing::instrument;

 #[derive(Deserialize, Clone, Debug)]
 #[serde(rename_all = "lowercase")]

@@ -63,6 +63,7 @@ pub struct WallyManifest {

 impl WallyManifest {
 	/// Get all dependencies from the manifest
+	#[instrument(skip(self), ret(level = "trace"), level = "debug")]
 	pub fn all_dependencies(
 		&self,
 	) -> Result<
@@ -11,7 +11,7 @@ use crate::{
 		manifest::{Realm, WallyManifest},
 		pkg_ref::WallyPackageRef,
 	},
-	IGNORED_DIRS, IGNORED_FILES,
+	PackageSources, ResolveResult, IGNORED_DIRS, IGNORED_FILES,
 	},
 	util::hash,
 	Project,

@@ -22,10 +22,15 @@ use gix::Url;
 use relative_path::RelativePathBuf;
 use reqwest::header::AUTHORIZATION;
 use serde::Deserialize;
-use std::{collections::BTreeMap, path::PathBuf, sync::Arc};
+use std::{
+	collections::{BTreeMap, HashSet},
+	path::PathBuf,
+	sync::Arc,
+};
 use tempfile::tempdir;
 use tokio::{io::AsyncWriteExt, sync::Mutex, task::spawn_blocking};
 use tokio_util::compat::FuturesAsyncReadCompatExt;
+use tracing::instrument;

 pub(crate) mod compat_util;
 pub(crate) mod manifest;

@@ -64,6 +69,7 @@ impl WallyPackageSource {
 	}

 	/// Reads the config file
+	#[instrument(skip_all, ret(level = "trace"), level = "debug")]
 	pub async fn config(&self, project: &Project) -> Result<WallyIndexConfig, errors::ConfigError> {
 		let repo_url = self.repo_url.clone();
 		let path = self.path(project);

@@ -90,22 +96,62 @@ impl PackageSource for WallyPackageSource {
 	type ResolveError = errors::ResolveError;
 	type DownloadError = errors::DownloadError;

+	#[instrument(skip_all, level = "debug")]
 	async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
 		GitBasedSource::refresh(self, project).await
 	}

+	#[instrument(skip_all, level = "debug")]
 	async fn resolve(
 		&self,
 		specifier: &Self::Specifier,
 		project: &Project,
-		_package_target: TargetKind,
-	) -> Result<crate::source::ResolveResult<Self::Ref>, Self::ResolveError> {
+		project_target: TargetKind,
+		refreshed_sources: &mut HashSet<PackageSources>,
+	) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
 		let repo = gix::open(self.path(project)).map_err(Box::new)?;
 		let tree = root_tree(&repo).map_err(Box::new)?;
 		let (scope, name) = specifier.name.as_str();
 		let string = match read_file(&tree, [scope, name]) {
 			Ok(Some(s)) => s,
-			Ok(None) => return Err(Self::ResolveError::NotFound(specifier.name.to_string())),
+			Ok(None) => {
+				tracing::debug!(
+					"{} not found in wally registry. searching in backup registries",
+					specifier.name
+				);
+
+				let config = self.config(project).await.map_err(Box::new)?;
+				for registry in config.fallback_registries {
+					let source = WallyPackageSource::new(registry.clone());
+					if refreshed_sources.insert(PackageSources::Wally(source.clone())) {
+						GitBasedSource::refresh(&source, project)
+							.await
+							.map_err(Box::new)?;
+					}
+
+					match Box::pin(source.resolve(
+						specifier,
+						project,
+						project_target,
+						refreshed_sources,
+					))
+					.await
+					{
+						Ok((name, results)) => {
+							tracing::debug!("found {} in backup registry {registry}", name);
+							return Ok((name, results));
+						}
+						Err(errors::ResolveError::NotFound(_)) => {
+							continue;
+						}
+						Err(e) => {
+							return Err(e);
+						}
+					}
+				}
+
+				return Err(Self::ResolveError::NotFound(specifier.name.to_string()));
+			}
 			Err(e) => {
 				return Err(Self::ResolveError::Read(
 					specifier.name.to_string(),

@@ -120,7 +166,7 @@ impl PackageSource for WallyPackageSource {
 			.collect::<Result<_, _>>()
 			.map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;

-		log::debug!("{} has {} possible entries", specifier.name, entries.len());
+		tracing::debug!("{} has {} possible entries", specifier.name, entries.len());

 		Ok((
 			PackageNames::Wally(specifier.name.clone()),

@@ -150,6 +196,7 @@ impl PackageSource for WallyPackageSource {
 		))
 	}

+	#[instrument(skip_all, level = "debug")]
 	async fn download(
 		&self,
 		pkg_ref: &Self::Ref,

@@ -165,7 +212,7 @@ impl PackageSource for WallyPackageSource {

 		let tempdir = match fs::read_to_string(&index_file).await {
 			Ok(s) => {
-				log::debug!(
+				tracing::debug!(
 					"using cached index file for package {}@{}",
 					pkg_ref.name,
 					pkg_ref.version

@@ -198,7 +245,7 @@ impl PackageSource for WallyPackageSource {
 		);

 		if let Some(token) = project.auth_config.tokens().get(&self.repo_url) {
-			log::debug!("using token for {}", self.repo_url);
+			tracing::debug!("using token for {}", self.repo_url);
 			request = request.header(AUTHORIZATION, token);
 		}

@@ -289,6 +336,8 @@ impl PackageSource for WallyPackageSource {
 #[derive(Debug, Clone, Deserialize)]
 pub struct WallyIndexConfig {
 	api: url::Url,
+	#[serde(default, deserialize_with = "crate::util::deserialize_gix_url_vec")]
+	fallback_registries: Vec<gix::Url>,
 }

 /// Errors that can occur when interacting with a Wally package source

@@ -327,6 +376,18 @@ pub mod errors {
 			String,
 			#[source] crate::manifest::errors::AllDependenciesError,
 		),
+
+		/// Error reading config file
+		#[error("error reading config file")]
+		Config(#[from] Box<ConfigError>),
+
+		/// Error refreshing backup registry source
+		#[error("error refreshing backup registry source")]
+		Refresh(#[from] Box<crate::source::git_index::errors::RefreshError>),
+
+		/// Error resolving package in backup registries
+		#[error("error resolving package in backup registries")]
+		BackupResolve(#[from] Box<ResolveError>),
 	}

 	/// Errors that can occur when reading the config file for a Wally package source
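The fallback-registry search above has resolve call itself, and the recursive call is wrapped in Box::pin. That is the standard escape hatch for async recursion: without indirection the async fn's future type would contain itself and have no finite size. A minimal standalone illustration, not code from this diff (compiles on Rust 1.77+, where recursive async fns with boxed indirection are allowed):

async fn countdown(n: u32) {
    if n == 0 {
        return;
    }
    // boxing the recursive future gives it a known, finite size
    Box::pin(countdown(n - 1)).await;
}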
@@ -3,15 +3,17 @@ use crate::{
 	names::PackageNames,
 	source::{
 		fs::PackageFS, specifiers::DependencySpecifiers, traits::PackageSource,
-		version_id::VersionId, workspace::pkg_ref::WorkspacePackageRef, ResolveResult,
+		version_id::VersionId, workspace::pkg_ref::WorkspacePackageRef, PackageSources,
+		ResolveResult,
 	},
 	Project, DEFAULT_INDEX_NAME,
 };
 use futures::StreamExt;
 use relative_path::RelativePathBuf;
 use reqwest::Client;
-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, HashSet};
 use tokio::pin;
+use tracing::instrument;

 /// The workspace package reference
 pub mod pkg_ref;

@@ -34,20 +36,22 @@ impl PackageSource for WorkspacePackageSource {
 		Ok(())
 	}

+	#[instrument(skip_all, level = "debug")]
 	async fn resolve(
 		&self,
 		specifier: &Self::Specifier,
 		project: &Project,
-		package_target: TargetKind,
+		project_target: TargetKind,
+		_refreshed_sources: &mut HashSet<PackageSources>,
 	) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
 		let (path, manifest) = 'finder: {
 			let workspace_dir = project
 				.workspace_dir
 				.as_ref()
 				.unwrap_or(&project.package_dir);
-			let target = specifier.target.unwrap_or(package_target);
+			let target = specifier.target.unwrap_or(project_target);

-			let members = project.workspace_members(workspace_dir).await?;
+			let members = project.workspace_members(workspace_dir, true).await?;
 			pin!(members);

 			while let Some((path, manifest)) = members.next().await.transpose()? {

@@ -124,6 +128,7 @@ impl PackageSource for WorkspacePackageSource {
 		))
 	}

+	#[instrument(skip_all, level = "debug")]
 	async fn download(
 		&self,
 		pkg_ref: &Self::Ref,
@@ -19,7 +19,7 @@ impl DependencySpecifier for WorkspaceDependencySpecifier {}

 impl Display for WorkspaceDependencySpecifier {
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-		write!(f, "workspace:{}{}", self.version, self.name)
+		write!(f, "{}@workspace:{}", self.name, self.version)
 	}
 }

@@ -108,6 +108,7 @@ pub mod errors {

 	/// Errors that can occur when parsing a version type
 	#[derive(Debug, Error)]
+	#[non_exhaustive]
 	pub enum VersionTypeFromStr {
 		/// The version type is invalid
 		#[error("invalid version type {0}")]

@@ -116,6 +117,7 @@ pub mod errors {

 	/// Errors that can occur when parsing a version type or requirement
 	#[derive(Debug, Error)]
+	#[non_exhaustive]
 	pub enum VersionTypeOrReqFromStr {
 		/// The version requirement is invalid
 		#[error("invalid version requirement {0}")]
src/util.rs

@@ -1,8 +1,8 @@
 use crate::AuthConfig;
 use gix::bstr::BStr;
-use serde::{ser::SerializeMap, Deserialize, Deserializer, Serializer};
+use serde::{Deserialize, Deserializer, Serializer};
 use sha2::{Digest, Sha256};
-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, HashSet};

 pub fn authenticate_conn(
 	conn: &mut gix::remote::Connection<

@@ -37,17 +37,6 @@ pub fn deserialize_gix_url<'de, D: Deserializer<'de>>(
 	gix::Url::from_bytes(BStr::new(&s)).map_err(serde::de::Error::custom)
 }

-pub fn serialize_gix_url_map<S: Serializer>(
-	url: &BTreeMap<String, gix::Url>,
-	serializer: S,
-) -> Result<S::Ok, S::Error> {
-	let mut map = serializer.serialize_map(Some(url.len()))?;
-	for (k, v) in url {
-		map.serialize_entry(k, &v.to_bstring().to_string())?;
-	}
-	map.end()
-}
-
 pub fn deserialize_gix_url_map<'de, D: Deserializer<'de>>(
 	deserializer: D,
 ) -> Result<BTreeMap<String, gix::Url>, D::Error> {

@@ -61,6 +50,24 @@ pub fn deserialize_gix_url_map<'de, D: Deserializer<'de>>(
 		.collect()
 }

+pub fn deserialize_gix_url_vec<'de, D: Deserializer<'de>>(
+	deserializer: D,
+) -> Result<Vec<gix::Url>, D::Error> {
+	Vec::<String>::deserialize(deserializer)?
+		.into_iter()
+		.map(|v| gix::Url::from_bytes(BStr::new(&v)).map_err(serde::de::Error::custom))
+		.collect()
+}
+
+pub fn deserialize_gix_url_hashset<'de, D: Deserializer<'de>>(
+	deserializer: D,
+) -> Result<HashSet<gix::Url>, D::Error> {
+	HashSet::<String>::deserialize(deserializer)?
+		.into_iter()
+		.map(|v| gix::Url::from_bytes(BStr::new(&v)).map_err(serde::de::Error::custom))
+		.collect()
+}
+
 pub fn deserialize_git_like_url<'de, D: Deserializer<'de>>(
 	deserializer: D,
 ) -> Result<gix::Url, D::Error> {

@@ -76,3 +83,7 @@ pub fn deserialize_git_like_url<'de, D: Deserializer<'de>>(
 pub fn hash<S: AsRef<[u8]>>(struc: S) -> String {
 	format!("{:x}", Sha256::digest(struc.as_ref()))
 }
+
+pub fn is_default<T: Default + Eq>(t: &T) -> bool {
+	t == &T::default()
+}
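The new deserialize_gix_url_vec and deserialize_gix_url_hashset helpers plug into serde through deserialize_with, exactly like the existing map variant; WallyIndexConfig and AllowedRegistries above are the real call sites. A sketch with a hypothetical struct name:

#[derive(serde::Deserialize)]
struct MirrorConfig {
    // parses each string in the source array into a gix::Url
    #[serde(default, deserialize_with = "crate::util::deserialize_gix_url_vec")]
    mirrors: Vec<gix::Url>,
}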
Binary file not shown.
@@ -12,7 +12,7 @@
 		"format": "prettier --write ."
 	},
 	"devDependencies": {
-		"@sveltejs/adapter-vercel": "^5.4.6",
+		"@sveltejs/adapter-cloudflare": "^4.8.0",
 		"@sveltejs/kit": "^2.7.3",
 		"@sveltejs/vite-plugin-svelte": "^4.0.0",
 		"@tailwindcss/typography": "^0.5.15",

@@ -24,6 +24,9 @@
 		"eslint-config-prettier": "^9.1.0",
 		"eslint-plugin-svelte": "^2.46.0",
 		"globals": "^15.11.0",
+		"mdast": "^3.0.0",
+		"mdast-util-directive": "^3.0.0",
+		"mdast-util-to-hast": "^13.2.0",
 		"mdsvex": "^0.12.3",
 		"prettier": "^3.3.3",
 		"prettier-plugin-svelte": "^3.2.7",

@@ -33,7 +36,8 @@
 		"tailwindcss": "^3.4.14",
 		"typescript": "^5.6.3",
 		"typescript-eslint": "^8.12.2",
-		"vite": "^5.4.10"
+		"vite": "^5.4.10",
+		"wrangler": "^3.91.0"
 	},
 	"type": "module",
 	"dependencies": {

@@ -47,15 +51,19 @@
 		"hast-util-heading": "^3.0.0",
 		"hast-util-heading-rank": "^3.0.0",
 		"hast-util-to-text": "^4.0.2",
+		"hastscript": "^9.0.0",
+		"lucide-static": "^0.462.0",
 		"lucide-svelte": "^0.446.0",
 		"rehype-infer-description-meta": "^2.0.0",
 		"rehype-raw": "^7.0.0",
 		"rehype-sanitize": "^6.0.0",
 		"rehype-slug": "^6.0.0",
 		"rehype-stringify": "^10.0.1",
+		"remark-directive": "^3.0.0",
 		"remark-frontmatter": "^5.0.0",
 		"remark-gemoji": "^8.0.0",
 		"remark-gfm": "^4.0.0",
+		"remark-github-admonitions-to-directives": "^2.1.0",
 		"remark-parse": "^11.0.0",
 		"remark-rehype": "^11.1.1",
 		"shiki": "^1.22.2",
website/src/admonitions.css (new file)

@@ -0,0 +1,108 @@
+.admonition {
+	@apply my-4 rounded-sm px-4 py-3 text-[--tw-prose-body] prose-p:my-2 prose-pre:my-4;
+	@apply border-l-4 border-[--admonition-border];
+	@apply bg-[--admonition-bg];
+
+	@apply [--shiki-background:theme(colors.white/0.2)];
+	@apply dark:[--shiki-background:theme(colors.black/0.2)];
+
+	--tw-prose-body: theme(colors.light);
+	--tw-prose-headings: theme(colors.light);
+	--tw-prose-lead: theme(colors.light);
+	--tw-prose-links: var(--admonition-text);
+	--tw-prose-bold: theme(colors.light);
+	--tw-prose-counters: theme(colors.light);
+	--tw-prose-bullets: var(--admonition-border);
+	--tw-prose-hr: var(--admonition-border);
+	--tw-prose-quotes: theme(colors.light);
+	--tw-prose-quote-borders: var(--admonition-border);
+	--tw-prose-code: theme(colors.light);
+	--tw-prose-pre-code: theme(colors.light);
+	--tw-prose-pre-bg: var(--shiki-background);
+	--tw-prose-th-borders: var(--admonition-border);
+	--tw-prose-td-borders: var(--admonition-border);
+}
+
+.admonition pre {
+	@apply border border-[--admonition-border] bg-[--shiki-background];
+}
+
+.admonition-title {
+	@apply flex items-center space-x-2 text-lg font-semibold;
+}
+
+.admonition-title * {
+	color: var(--admonition-text);
+}
+
+.admonition-icon {
+	@apply inline-block size-6 bg-current;
+	mask-image: var(--admonition-icon);
+}
+
+.admonition-note {
+	--admonition-bg: theme(colors.blue.600 / 0.1);
+	--admonition-border: theme(colors.blue.600 / 0.4);
+	--admonition-text: theme(colors.blue.950);
+	--admonition-icon: url(lucide-static/icons/info.svg);
+}
+
+.admonition-tip {
+	--admonition-bg: theme(colors.green.600 / 0.1);
+	--admonition-border: theme(colors.green.600 / 0.4);
+	--admonition-text: theme(colors.green.950);
+	--admonition-icon: url(lucide-static/icons/lightbulb.svg);
+}
+
+.admonition-info {
+	--admonition-bg: theme(colors.purple.600 / 0.1);
+	--admonition-border: theme(colors.purple.600 / 0.4);
+	--admonition-text: theme(colors.purple.950);
+	--admonition-icon: url(lucide-static/icons/message-square-warning.svg);
+}
+
+.admonition-warning {
+	--admonition-bg: theme(colors.yellow.600 / 0.1);
+	--admonition-border: theme(colors.yellow.600 / 0.4);
+	--admonition-text: theme(colors.yellow.950);
+	--admonition-icon: url(lucide-static/icons/triangle-alert.svg);
+}
+
+.admonition-danger {
+	--admonition-bg: theme(colors.red.600 / 0.1);
+	--admonition-border: theme(colors.red.600 / 0.4);
+	--admonition-text: theme(colors.red.950);
+	--admonition-icon: url(lucide-static/icons/octagon-alert.svg);
+}
+
+@media (prefers-color-scheme: dark) {
+	.admonition-note {
+		--admonition-bg: theme(colors.blue.500 / 0.1);
+		--admonition-border: theme(colors.blue.500 / 0.6);
+		--admonition-text: theme(colors.blue.100);
+	}
+
+	.admonition-tip {
+		--admonition-bg: theme(colors.green.500 / 0.1);
+		--admonition-border: theme(colors.green.500 / 0.6);
+		--admonition-text: theme(colors.green.100);
+	}
+
+	.admonition-info {
+		--admonition-bg: theme(colors.purple.500 / 0.1);
+		--admonition-border: theme(colors.purple.500 / 0.6);
+		--admonition-text: theme(colors.purple.100);
+	}
+
+	.admonition-warning {
+		--admonition-bg: theme(colors.yellow.500 / 0.1);
+		--admonition-border: theme(colors.yellow.500 / 0.6);
+		--admonition-text: theme(colors.yellow.100);
+	}
+
+	.admonition-danger {
+		--admonition-bg: theme(colors.red.500 / 0.1);
+		--admonition-border: theme(colors.red.500 / 0.6);
+		--admonition-text: theme(colors.red.100);
+	}
+}
@@ -2,6 +2,8 @@
 @import "tailwindcss/components";
 @import "tailwindcss/utilities";

+@import "admonitions.css";
+
 :root {
 	--color-background: 255 245 230;
 	--color-card: 245 230 210;
website/src/app.d.ts

@@ -1,3 +1,5 @@
+/// <reference types="mdast-util-directive" />
+
 // See https://kit.svelte.dev/docs/types#app
 // for information about these interfaces
 declare global {
@@ -3,25 +3,116 @@ import type { Nodes } from "hast"
 import { heading } from "hast-util-heading"
 import { headingRank } from "hast-util-heading-rank"
 import { toText } from "hast-util-to-text"
+import { h, type Child } from "hastscript"
+import type { ContainerDirective } from "mdast-util-directive"
+import type { Handler } from "mdast-util-to-hast"
 import rehypeInferDescriptionMeta from "rehype-infer-description-meta"
 import rehypeRaw from "rehype-raw"
-import rehypeSanitize from "rehype-sanitize"
+import rehypeSanitize, { defaultSchema } from "rehype-sanitize"
 import rehypeSlug from "rehype-slug"
 import rehypeStringify from "rehype-stringify"
+import remarkDirective from "remark-directive"
 import remarkFrontmatter from "remark-frontmatter"
 import remarkGemoji from "remark-gemoji"
 import remarkGfm from "remark-gfm"
+import remarkGithubAdmonitionsToDirectives from "remark-github-admonitions-to-directives"
 import remarkParse from "remark-parse"
 import remarkRehype from "remark-rehype"
-import { createCssVariablesTheme, createHighlighter } from "shiki"
+import { createCssVariablesTheme, createHighlighter, loadWasm } from "shiki"
 import { unified } from "unified"
 import type { Node } from "unist"
 import { map } from "unist-util-map"

-const highlighter = createHighlighter({
-	themes: [],
-	langs: [],
-})
+// @ts-expect-error - typescript doesn't like the wasm import
+import onigWasm from "shiki/onig.wasm"
+
+const loadOnigWasm = (async () => {
+	await loadWasm(onigWasm())
+})()
+
+const highlighter = (async () => {
+	await loadOnigWasm
+
+	return await createHighlighter({
+		themes: [],
+		langs: [],
+	})
+})()
+
+const ADMONITION_TYPES = {
+	note: {
+		label: "Note",
+	},
+	tip: {
+		label: "Tip",
+	},
+	info: {
+		label: "Info",
+	},
+	warning: {
+		label: "Warning",
+	},
+	danger: {
+		label: "Danger",
+	},
+}
+
+const containerDirectiveHandler: Handler = (state, node: ContainerDirective) => {
+	const type = node.name as keyof typeof ADMONITION_TYPES
+	if (!type || !(type in ADMONITION_TYPES)) {
+		return
+	}
+
+	const typeInfo = ADMONITION_TYPES[type]
+
+	let label: Child = typeInfo.label
+
+	const firstChild = node.children[0]
+	if (firstChild?.type === "paragraph" && firstChild.data?.directiveLabel) {
+		node.children.shift()
+		label = state.all(firstChild)
+	}
+
+	return h(
+		"div",
+		{
+			class: `admonition admonition-${type}`,
+		},
+		[
+			h(
+				"p",
+				{
+					class: "admonition-title",
+				},
+				[
+					h("span", {
+						class: "admonition-icon",
+					}),
+					h(
+						"span",
+						{
+							class: "admonition-label",
+						},
+						label,
+					),
+				],
+			),
+			state.all(node),
+		],
+	)
+}
+
+const sanitizeSchema: typeof defaultSchema = {
+	...defaultSchema,
+	attributes: {
+		...defaultSchema.attributes,
+		"*": [...(defaultSchema.attributes?.["*"] ?? []), ["className", "admonition", /^admonition-/]],
+	},
+}
+
+const remarkRehypeHandlers = {
+	containerDirective: containerDirectiveHandler,
+}

 export const markdown = (async () => {
 	return unified()

@@ -29,9 +120,11 @@ export const markdown = (async () => {
 		.use(remarkFrontmatter)
 		.use(remarkGfm)
 		.use(remarkGemoji)
-		.use(remarkRehype, { allowDangerousHtml: true })
+		.use(remarkGithubAdmonitionsToDirectives)
+		.use(remarkDirective)
+		.use(remarkRehype, { allowDangerousHtml: true, handlers: remarkRehypeHandlers })
 		.use(rehypeRaw)
-		.use(rehypeSanitize)
+		.use(rehypeSanitize, sanitizeSchema)
 		.use(rehypeShikiFromHighlighter, await highlighter, {
 			lazy: true,
 			theme: createCssVariablesTheme({

@@ -58,7 +151,13 @@ export const docsMarkdown = (async () => {
 		.use(remarkFrontmatter)
 		.use(remarkGfm)
 		.use(remarkGemoji)
-		.use(remarkRehype, { allowDangerousHtml: true, clobberPrefix: "" })
+		.use(remarkGithubAdmonitionsToDirectives)
+		.use(remarkDirective)
+		.use(remarkRehype, {
+			allowDangerousHtml: true,
+			clobberPrefix: "",
+			handlers: remarkRehypeHandlers,
+		})
 		.use(rehypeSlug)
 		.use(() => (node, file) => {
 			const toc: TocItem[] = []

@@ -97,7 +196,7 @@ export const docsMarkdown = (async () => {
 			}) as Node
 		})
 		.use(rehypeRaw)
-		.use(rehypeSanitize)
+		.use(rehypeSanitize, sanitizeSchema)
 		.use(rehypeShikiFromHighlighter, await highlighter, {
 			lazy: true,
 			theme: createCssVariablesTheme({
@@ -26,6 +26,7 @@ export type TargetInfo = {
 	kind: TargetKind
 	lib: boolean
 	bin: boolean
+	scripts?: string[]
 }

 export type TargetKind = "roblox" | "roblox_server" | "lune" | "luau"
@@ -60,7 +60,7 @@
 </div>
 <div class="flex items-center py-5">
 	<a
-		href="https://github.com/daimond113/pesde"
+		href="https://github.com/pesde-pkg/pesde"
 		target="_blank"
 		rel="noreferrer noopener"
 	>
@@ -62,7 +62,7 @@
 	<a href="https://docs.pesde.daimond113.com/registry/policies">Policies</a>
 </nav>

-<a href="https://github.com/daimond113/pesde" target="_blank" rel="noreferrer noopener">
+<a href="https://github.com/pesde-pkg/pesde" target="_blank" rel="noreferrer noopener">
 	<GitHub class="size-6" />
 </a>
 </div>
@@ -2,7 +2,7 @@
 	import { page } from "$app/stores"
 	import GitHub from "$lib/components/GitHub.svelte"
 	import type { TargetInfo } from "$lib/registry-api"
-	import { BinaryIcon, Globe, Icon, LibraryIcon, Mail } from "lucide-svelte"
+	import { BinaryIcon, Globe, Icon, LibraryIcon, Mail, ScrollIcon } from "lucide-svelte"
 	import type { ComponentType } from "svelte"
 	import TargetSelector from "../../TargetSelector.svelte"
 	import Command from "./Command.svelte"
@@ -36,11 +36,13 @@
 	const exportNames: Partial<Record<keyof TargetInfo, string>> = {
 		lib: "Library",
 		bin: "Binary",
+		scripts: "Scripts",
 	}
 
 	const exportIcons: Partial<Record<keyof TargetInfo, ComponentType<Icon>>> = {
 		lib: LibraryIcon,
 		bin: BinaryIcon,
+		scripts: ScrollIcon,
 	}
 
 	const exportEntries = $derived(
@@ -92,20 +94,30 @@
 <ul class="mb-6 space-y-0.5">
 	{#each exportEntries as [exportKey, exportName]}
 		{@const Icon = exportIcons[exportKey as keyof TargetInfo]}
-		<li class="flex items-center">
-			<Icon aria-hidden="true" class="text-primary mr-2 size-5" />
-			{exportName}
+		<li>
+			<div class="flex items-center">
+				<Icon aria-hidden="true" class="text-primary mr-2 size-5" />
+				{exportName}
+			</div>
+			{#if exportKey === "bin"}
+				<p class="text-body/80 mb-4 mt-3 text-sm">
+					This package provides a binary that can be executed after installation, or globally
+					via:
+				</p>
+				<Command command={xCommand} class="mb-6" />
+			{:else if exportKey === "scripts"}
+				<div class="text-body/80 mt-3 flex flex-wrap gap-2 text-sm">
+					{#each currentTarget?.scripts ?? [] as script}
+						<div class="bg-card text-heading w-max truncate rounded px-3 py-2" title={script}>
+							{script}
+						</div>
+					{/each}
+				</div>
+			{/if}
 		</li>
 	{/each}
 </ul>
 
-{#if currentTarget?.bin}
-	<p class="text-body/80 -mt-3 mb-4 text-sm">
-		This package provides a binary that can be executed after installation, or globally via:
-	</p>
-	<Command command={xCommand} class="mb-6" />
-{/if}
-
 {#if data.pkg.authors && data.pkg.authors.length > 0}
 	<h2 class="text-heading mb-2 text-lg font-semibold">Authors</h2>
 	<ul>
@@ -2,7 +2,9 @@
 	const { data } = $props()
 </script>
 
-<div class="prose min-w-0 py-8 prose-pre:w-full prose-pre:overflow-auto">
+<div
+	class="prose prose-pre:w-full prose-pre:overflow-auto prose-img:inline-block prose-img:m-0 prose-video:inline-block prose-video:m-0 min-w-0 py-8"
+>
 	<!-- eslint-disable-next-line svelte/no-at-html-tags -->
 	{@html data.readmeHtml}
 </div>
@@ -52,7 +52,10 @@
 		: (dependencyInfo.target ?? $page.params.target ?? data.pkg.targets[0].kind)}
 	{@const isOfficialRegistry = isWally
 		? dependencyInfo.index.toLowerCase() === "https://github.com/upliftgames/wally-index"
-		: dependencyInfo.index.toLowerCase() === "https://github.com/daimond113/pesde-index"}
+		: [
+				"https://github.com/daimond113/pesde-index",
+				"https://github.com/pesde-pkg/index",
+			].includes(dependencyInfo.index.toLowerCase())}
 
 	<article
 		class={`bg-card relative overflow-hidden rounded px-5 py-4 transition ${
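The same check can be read as a standalone predicate; this is a sketch with an invented constant name. Keeping both URLs on the list means packages that still point at the old index keep their official badge.

	const OFFICIAL_PESDE_INDICES = [
		"https://github.com/daimond113/pesde-index",
		"https://github.com/pesde-pkg/index",
	]

	function isOfficialPesdeIndex(index: string): boolean {
		return OFFICIAL_PESDE_INDICES.includes(index.toLowerCase())
	}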
@@ -1,6 +1,6 @@
-import { mdsvex } from "mdsvex"
+import adapter from "@sveltejs/adapter-cloudflare"
 import { vitePreprocess } from "@sveltejs/vite-plugin-svelte"
-import adapter from "@sveltejs/adapter-vercel"
+import { mdsvex } from "mdsvex"
 
 /** @type {import('@sveltejs/kit').Config} */
 const config = {
@@ -1,6 +1,89 @@
 import { sveltekit } from "@sveltejs/kit/vite"
-import { defineConfig } from "vite"
+import { readFile } from "node:fs/promises"
+import path from "node:path"
+import { defineConfig, type Plugin, type ResolvedConfig } from "vite"
 
 export default defineConfig({
-	plugins: [sveltekit()],
+	plugins: [sveltekit(), cloudflareWasmImport()],
 })
+
+// This plugin allows us to import WebAssembly modules and have them work in
+// the browser, Node.js, and Cloudflare Workers alike.
+function cloudflareWasmImport(): Plugin {
+	const wasmPostfix = ".wasm"
+	const importMetaPrefix = "___WASM_IMPORT_PATH___"
+
+	let config: ResolvedConfig
+
+	return {
+		name: "cloudflare-wasm-import",
+		configResolved(resolvedConfig) {
+			config = resolvedConfig
+		},
+		async load(id) {
+			if (!id.endsWith(wasmPostfix)) return
+
+			if (config.command === "serve") {
+				// Running dev server
+
+				// We generate a module that on the browser will fetch the WASM file
+				// (through a Vite `?url` import), and on the server will read the file
+				// from the file system.
+
+				return `
+					import WASM_URL from ${JSON.stringify(`${id}?url`)}
+
+					let promise
+					export default function() {
+						if (import.meta.env.SSR) {
+							return promise ?? (promise = import("node:fs/promises")
+								.then(({ readFile }) => readFile(${JSON.stringify(id)})))
+						} else {
+							return promise ?? (promise = fetch(WASM_URL).then(r => r.arrayBuffer()))
+						}
+					}
+				`
+			}
+
+			// When building, we emit the WASM file as an asset and generate a module
+			// that will fetch the asset in the browser, import the WASM file when in
+			// a Cloudflare Worker, and read the file from the file system when in
+			// Node.js.
+
+			const wasmSource = await readFile(id)
+
+			const refId = this.emitFile({
+				type: "asset",
+				name: path.basename(id),
+				source: wasmSource,
+			})
+
+			return `
+				import WASM_URL from ${JSON.stringify(`${id}?url`)}
+
+				let promise
+				export default function() {
+					if (import.meta.env.SSR) {
+						if (typeof navigator !== "undefined" && navigator.userAgent === "Cloudflare-Workers") {
+							return promise ?? (promise = import(import.meta.${importMetaPrefix}${refId}))
+						} else {
+							return promise ?? (promise = import(\`\${"node:fs/promises"}\`)
+								.then(({ readFile }) => readFile(new URL(import.meta.ROLLUP_FILE_URL_${refId}))))
+						}
+					} else {
+						return promise ?? (promise = fetch(WASM_URL).then(r => r.arrayBuffer()))
+					}
+				}
+			`
+		},
+		resolveImportMeta(property, { chunkId }) {
+			if (!property?.startsWith(importMetaPrefix)) return
+
+			const refId = property.slice(importMetaPrefix.length)
+			const fileName = this.getFileName(refId)
+			const relativePath = path.relative(path.dirname(chunkId), fileName)
+
+			return JSON.stringify(relativePath)
+		},
+	}
+}
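A hedged sketch of a consumer of this plugin; the module path and the normalization step are invented for illustration. The generated default export memoizes the load, and the resolved value differs per runtime, so a caller has to normalize before instantiating.

	// Hypothetical consumer module.
	import loadWasm from "./example_bg.wasm"

	const loaded = await loadWasm()
	// Browser: ArrayBuffer from fetch; Node.js: Buffer from readFile;
	// Cloudflare Workers: the dynamically imported WASM module, whose default
	// export is an already-compiled WebAssembly.Module.
	const wasmModule =
		loaded instanceof ArrayBuffer || ArrayBuffer.isView(loaded)
			? await WebAssembly.compile(loaded)
			: (loaded as { default: WebAssembly.Module }).default
	const instance = await WebAssembly.instantiate(wasmModule, {})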
9	website/wrangler.toml	Normal file

@@ -0,0 +1,9 @@
+name = "pesde"
+compatibility_date = "2024-11-30"
+pages_build_output_dir = ".svelte-kit/cloudflare"
+
+[vars]
+PUBLIC_REGISTRY_URL = "https://registry.pesde.daimond113.com/v0/"
+
+[observability]
+enabled = true
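One plausible way the site could read the variable declared under [vars]; this is an assumption, since the consuming code is not part of this diff. SvelteKit exposes PUBLIC_-prefixed variables through its $env modules:

	import { env } from "$env/dynamic/public"

	// Hypothetical helper resolving registry endpoints against the configured base URL.
	export function registryUrl(path: string): URL {
		return new URL(path, env.PUBLIC_REGISTRY_URL)
	}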