feat: 🎉 initial commit

This commit is contained in:
daimond113 2024-03-04 21:18:49 +01:00
commit ec1c6fcffa
No known key found for this signature in database
GPG key ID: 3A8ECE51328B513C
38 changed files with 13248 additions and 0 deletions

6
.dockerignore Normal file
View file

@ -0,0 +1,6 @@
*
!src
!registry
!Cargo.lock
!Cargo.toml
!rust-toolchain.toml

101
.github/workflows/release.yaml vendored Normal file
View file

@ -0,0 +1,101 @@
# Release workflow: builds the pesde binary for each supported platform on a
# v* tag push, then creates a draft GitHub release with a git-cliff changelog.
name: Release
on:
  push:
    tags:
      # Quoted so the glob is an explicit string (plain "*" is a YAML sigil).
      - "v*"
jobs:
  build:
    strategy:
      matrix:
        include:
          - os: ubuntu-latest
            host: linux
            label: linux-x86_64
            target: x86_64-unknown-linux-gnu
          - os: windows-latest
            host: windows
            label: windows-x86_64
            target: x86_64-pc-windows-msvc
          - os: macos-latest
            host: macos
            label: macos-x86_64
            target: x86_64-apple-darwin
          - os: macos-latest-xlarge
            host: macos
            label: macos-aarch64
            target: aarch64-apple-darwin
    runs-on: ${{ matrix.os }}
    name: Build for ${{ matrix.label }}
    steps:
      - uses: actions/checkout@v4
      - name: Set up Rust
        uses: moonrepo/setup-rust@v1
        with:
          targets: ${{ matrix.target }}
      - name: Set env
        shell: bash
        # Derive the archive name from the tag, stripping the leading "v".
        run: |
          BIN_NAME=pesde
          ARCHIVE_NAME=$BIN_NAME-$(echo ${{ github.ref_name }} | cut -c 2-)-${{ matrix.label }}.zip
          echo "BIN_NAME=$BIN_NAME" >> $GITHUB_ENV
          echo "ARCHIVE_NAME=$ARCHIVE_NAME" >> $GITHUB_ENV
      - name: Build
        run: cargo build --bins --all-features --release --target ${{ matrix.target }} --locked
      - name: Archive
        shell: bash
        # Windows runners ship 7z; the unix runners use zip.
        run: |
          if [ ${{ matrix.host }} = "windows" ]; then
            cp target/${{ matrix.target }}/release/${{ env.BIN_NAME }}.exe ${{ env.BIN_NAME }}.exe
            7z a ${{ env.ARCHIVE_NAME }} ${{ env.BIN_NAME }}.exe
          else
            cp target/${{ matrix.target }}/release/${{ env.BIN_NAME }} ${{ env.BIN_NAME }}
            zip -r ${{ env.ARCHIVE_NAME }} ${{ env.BIN_NAME }}
          fi
      - name: Upload assets
        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.ARCHIVE_NAME }}
          path: ${{ env.ARCHIVE_NAME }}
  create_release:
    name: Create Release
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: read
    needs: [build]
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history so git-cliff can see every commit for the changelog.
          fetch-depth: 0
      - uses: actions/download-artifact@v4
        with:
          path: artifacts
          merge-multiple: true
      - name: Generate a changelog
        uses: orhun/git-cliff-action@v3
        id: git-cliff
        with:
          config: cliff.toml
          args: --verbose --current --strip header --github-repo ${{ github.repository }} --github-token ${{ secrets.GITHUB_TOKEN }}
      - name: Create Release
        id: create_release
        uses: softprops/action-gh-release@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          tag_name: ${{ github.ref_name }}
          name: ${{ github.ref_name }}
          body: ${{ steps.git-cliff.outputs.content }}
          # Draft so a human reviews the generated notes before publishing.
          draft: true
          prerelease: false
          files: artifacts/*

35
.github/workflows/test-and-lint.yaml vendored Normal file
View file

@ -0,0 +1,35 @@
name: Test & Lint
on:
- push
- pull_request
env:
CARGO_TERM_COLOR: always
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Rust
uses: moonrepo/setup-rust@v1
with:
bins: cargo-tarpaulin
components: rustfmt, clippy
- name: Run tests
run: cargo test --all
- name: Check formatting
run: cargo fmt --all -- --check
- name: Run clippy
run: cargo clippy --all-targets --all-features -- -D warnings
- name: Generate coverage report
run: cargo tarpaulin --all-features --out xml --exclude-files src/cli/* --exclude-files registry/* --exclude-files src/main.rs --skip-clean
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v4.0.1
with:
token: ${{ secrets.CODECOV_TOKEN }}

7
.gitignore vendored Normal file
View file

@ -0,0 +1,7 @@
**/target
**/.env
.idea
cobertura.xml
tarpaulin-report.html
build_rs_cov.profraw
registry/cache

5550
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

56
Cargo.toml Normal file
View file

@ -0,0 +1,56 @@
[package]
name = "pesde"
version = "0.1.0"
edition = "2021"
license = "MIT"
authors = ["daimond113 <contact@daimond113.com>"]
description = "A package manager for Roblox"
homepage = "https://pesde.daimond113.com"
[features]
bin = ["clap", "directories", "keyring", "anyhow", "ignore", "pretty_env_logger", "serde_json", "reqwest/json", "reqwest/multipart", "lune", "futures-executor", "indicatif", "auth-git2", "indicatif-log-bridge"]
[[bin]]
name = "pesde"
path = "src/main.rs"
required-features = ["bin"]
[dependencies]
serde = { version = "1.0.197", features = ["derive"] }
serde_yaml = "0.9.32"
git2 = "0.18.2"
semver = { version = "1.0.22", features = ["serde"] }
reqwest = { version = "0.11.24", default-features = false, features = ["rustls-tls", "blocking"] }
tar = "0.4.40"
flate2 = "1.0.28"
pathdiff = "0.2.1"
relative-path = { version = "1.9.2", features = ["serde"] }
log = "0.4.20"
thiserror = "1.0.57"
threadpool = "1.8.1"
full_moon = { version = "0.19.0", features = ["stacker", "roblox"] }
clap = { version = "4.5.1", features = ["derive"], optional = true }
directories = { version = "5.0.1", optional = true }
keyring = { version = "2.3.2", optional = true }
anyhow = { version = "1.0.80", optional = true }
ignore = { version = "0.4.22", optional = true }
pretty_env_logger = { version = "0.5.0", optional = true }
serde_json = { version = "1.0.114", optional = true }
lune = { version = "0.8.0", optional = true }
futures-executor = { version = "0.3.30", optional = true }
indicatif = { version = "0.17.8", optional = true }
auth-git2 = { version = "0.5.3", optional = true }
indicatif-log-bridge = { version = "0.2.2", optional = true }
[dev-dependencies]
tempfile = "3.10.1"
[workspace]
resolver = "2"
members = [
"registry"
]
[profile.dev.package.full_moon]
opt-level = 3

9
Dockerfile Normal file
View file

@ -0,0 +1,9 @@
FROM rust:1.76

# Copy the workspace into a dedicated directory instead of the image root:
# `COPY . .` with the default WORKDIR (`/`) previously spilled the source tree
# over the filesystem root. The registry crate depends on the root crate via
# `path = ".."`, so the whole workspace must be present.
WORKDIR /app
COPY . .

# Build and install the registry binary from its crate directory.
WORKDIR /app/registry
RUN cargo install --path .

CMD ["pesde-registry"]

22
README.md Normal file
View file

@ -0,0 +1,22 @@
# pesde
pesde is a package manager for Roblox that is designed to be feature-rich and easy to use.
Currently, pesde is in a very early stage of development, but already supports the following features:
- Managing dependencies
- Re-exporting types
- `bin` exports (ran with Lune)
- Patching packages
## Installation
pesde can be installed from GitHub Releases. You can find the latest release [here](https://github.com/daimond113/pesde/releases).
It can also be installed by using [Aftman](https://github.com/LPGhatguy/aftman).
## Hosting
The pesde registry is hosted on [fly.io](https://fly.io). You can find the registry [here](https://registry.pesde.daimond113.com).
## Previous art
pesde is heavily inspired by [npm](https://www.npmjs.com/), [pnpm](https://pnpm.io/), [wally](https://wally.run), and [cargo](https://doc.rust-lang.org/cargo/). It aims to take the best parts of each package manager and combine them into one.

86
cliff.toml Normal file
View file

@ -0,0 +1,86 @@
[changelog]
header = """
# Changelog\n
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n
"""
body = """
{%- macro remote_url() -%}
https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}
{%- endmacro -%}
{% if version -%}
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else -%}
## [Unreleased]
{% endif -%}
### Details\
{% for group, commits in commits | group_by(attribute="group") %}
#### {{ group | upper_first }}
{%- for commit in commits %}
- {{ commit.message | upper_first | trim }}\
{% if commit.github.username %} by @{{ commit.github.username }}{%- endif -%}
{% if commit.github.pr_number %} in \
[#{{ commit.github.pr_number }}]({{ self::remote_url() }}/pull/{{ commit.github.pr_number }}) \
{%- endif -%}
{% endfor %}
{% endfor %}
{%- if github.contributors | filter(attribute="is_first_time", value=true) | length != 0 %}
## New Contributors
{%- endif -%}
{% for contributor in github.contributors | filter(attribute="is_first_time", value=true) %}
* @{{ contributor.username }} made their first contribution
{%- if contributor.pr_number %} in \
[#{{ contributor.pr_number }}]({{ self::remote_url() }}/pull/{{ contributor.pr_number }}) \
{%- endif %}
{%- endfor %}\n
"""
footer = """
{%- macro remote_url() -%}
https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}
{%- endmacro -%}
{% for release in releases -%}
{% if release.version -%}
{% if release.previous.version -%}
[{{ release.version | trim_start_matches(pat="v") }}]: \
{{ self::remote_url() }}/compare/{{ release.previous.version }}..{{ release.version }}
{% endif -%}
{% else -%}
[unreleased]: {{ self::remote_url() }}/compare/{{ release.previous.version }}..HEAD
{% endif -%}
{% endfor %}
<!-- generated by git-cliff -->
"""
trim = true
[git]
conventional_commits = true
filter_unconventional = true
split_commits = false
commit_parsers = [
{ message = "^feat", group = "Features" },
{ message = "^fix", group = "Bug Fixes" },
{ message = "^doc", group = "Documentation", default_scope = "unscoped" },
{ message = "^perf", group = "Performance" },
{ message = "^refactor", group = "Refactor" },
{ message = "^style", group = "Styling" },
{ message = "^test", group = "Testing" },
{ message = "^chore\\(release\\): prepare for", skip = true },
{ message = "^chore", group = "Miscellaneous Tasks" },
{ body = ".*security", group = "Security" },
]
protect_breaking_commits = true
filter_commits = true
tag_pattern = "v[0-9].*"
ignore_tags = ""
topo_order = true
sort_commits = "newest"

31
fly.toml Normal file
View file

@ -0,0 +1,31 @@
# fly.toml app configuration file generated for pesde-registry on 2024-03-04T20:57:13+01:00
#
# See https://fly.io/docs/reference/configuration/ for information about how to use this file.
#
app = 'pesde-registry'
primary_region = 'waw'
kill_signal = 'SIGINT'
kill_timeout = '5s'
[build]
[env]
ADDRESS = '0.0.0.0'
PORT = '8080'
INDEX_REPO_URL = 'https://github.com/daimond113/pesde-index'
COMMITTER_GIT_NAME = 'Pesde Index Updater'
COMMITTER_GIT_EMAIL = 'pesde@daimond113.com'
[http_service]
internal_port = 8080
force_https = true
auto_stop_machines = true
auto_start_machines = true
min_machines_running = 0
processes = ['app']
[[vm]]
memory = '1gb'
cpu_kind = 'shared'
cpus = 1

11
registry/.env.example Normal file
View file

@ -0,0 +1,11 @@
INDEX_REPO_URL=# url of the git repository to be used as the package index
S3_ENDPOINT=# endpoint of the s3 bucket
S3_BUCKET_NAME=# name of the s3 bucket
S3_REGION=# region of the s3 bucket
S3_ACCESS_KEY=# access key of the s3 bucket
S3_SECRET_KEY=# secret key of the s3 bucket
COMMITTER_GIT_NAME=# name of the committer used for index updates
COMMITTER_GIT_EMAIL=# email of the committer used for index updates
GITHUB_USERNAME=# username of github account with push access to the index repository
GITHUB_PAT=# personal access token of github account with push access to the index repository
SENTRY_URL=# optional url of sentry error tracking

3201
registry/Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

29
registry/Cargo.toml Normal file
View file

@ -0,0 +1,29 @@
[package]
name = "pesde-registry"
version = "0.1.0"
edition = "2021"
[dependencies]
actix-web = "4.5.1"
actix-cors = "0.7.0"
actix-web-httpauth = "0.8.1"
actix-multipart = "0.6.1"
actix-multipart-derive = "0.6.1"
dotenvy = "0.15.7"
reqwest = { version = "0.11.24", features = ["json", "blocking"] }
rusty-s3 = "0.5.0"
serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.114"
serde_yaml = "0.9.32"
flate2 = "1.0.28"
tar = "0.4.40"
pesde = { path = ".." }
semver = "1.0.22"
git2 = "0.18.2"
thiserror = "1.0.57"
tantivy = "0.21.1"
log = "0.4.21"
pretty_env_logger = "0.5.0"
sentry = "0.32.2"
sentry-log = "0.32.2"
sentry-actix = "0.32.2"

View file

@ -0,0 +1,8 @@
use actix_web::web;
pub mod packages;
pub mod search;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.configure(packages::configure);
}

View file

@ -0,0 +1,208 @@
use actix_multipart::form::{bytes::Bytes, MultipartForm};
use actix_web::{get, post, web, HttpResponse, Responder};
use flate2::read::GzDecoder;
use reqwest::StatusCode;
use rusty_s3::S3Action;
use tantivy::{doc, Term};
use tar::Archive;
use pesde::{
dependencies::DependencySpecifier, index::Index, manifest::Manifest, package_name::PackageName,
IGNORED_FOLDERS, MANIFEST_FILE_NAME,
};
use crate::{commit_signature, errors, AppState, UserId, S3_EXPIRY};
#[derive(MultipartForm)]
struct CreateForm {
#[multipart(limit = "4 MiB")]
tarball: Bytes,
}
#[post("/packages")]
async fn create(
form: MultipartForm<CreateForm>,
app_state: web::Data<AppState>,
user_id: web::ReqData<UserId>,
) -> Result<impl Responder, errors::Errors> {
let bytes = form.tarball.data.as_ref().to_vec();
let mut decoder = GzDecoder::new(bytes.as_slice());
let mut archive = Archive::new(&mut decoder);
let archive_entries = archive.entries()?.filter_map(|e| e.ok());
let mut manifest = None;
for mut e in archive_entries {
let Ok(path) = e.path() else {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: "Attached file contains non-UTF-8 path".to_string(),
}));
};
let Some(path) = path.as_os_str().to_str() else {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: "Attached file contains non-UTF-8 path".to_string(),
}));
};
match path {
MANIFEST_FILE_NAME => {
if !e.header().entry_type().is_file() {
continue;
}
let received_manifest: Manifest =
serde_yaml::from_reader(&mut e).map_err(errors::Errors::UserYaml)?;
manifest = Some(received_manifest);
}
path => {
if e.header().entry_type().is_file() {
continue;
}
if IGNORED_FOLDERS.contains(&path) {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: format!("Attached file contains forbidden directory {}", path),
}));
}
}
}
}
let Some(manifest) = manifest else {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: format!("Attached file doesn't contain {MANIFEST_FILE_NAME}"),
}));
};
let (scope, name) = manifest.name.parts();
{
let mut index = app_state.index.lock().unwrap();
let config = index.config()?;
for (dependency, _) in manifest.dependencies().iter() {
match dependency {
DependencySpecifier::Git(_) => {
if !config.git_allowed {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: "Git dependencies are not allowed on this registry".to_string(),
}));
}
}
DependencySpecifier::Registry(registry) => {
if index.package(&registry.name).unwrap().is_none() {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: format!("Dependency {} not found", registry.name),
}));
}
}
};
}
let success = index.create_package_version(&manifest, &user_id.0)?;
if !success {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: format!(
"Version {} of {} already exists",
manifest.version, manifest.name
),
}));
}
index.commit_and_push(
&format!("Add version {}@{}", manifest.name, manifest.version),
&commit_signature(),
)?;
}
{
let mut search_writer = app_state.search_writer.lock().unwrap();
let schema = search_writer.index().schema();
let name_field = schema.get_field("name").unwrap();
search_writer.delete_term(Term::from_field_text(
name_field,
&manifest.name.to_string(),
));
search_writer.add_document(
doc!(
name_field => manifest.name.to_string(),
schema.get_field("version").unwrap() => manifest.version.to_string(),
schema.get_field("description").unwrap() => manifest.description.unwrap_or_default(),
)
).unwrap();
search_writer.commit().unwrap();
}
let url = app_state
.s3_bucket
.put_object(
Some(&app_state.s3_credentials),
&*format!("{scope}-{name}-{}.tar.gz", manifest.version),
)
.sign(S3_EXPIRY);
app_state.reqwest_client.put(url).body(bytes).send().await?;
Ok(HttpResponse::Ok().body(format!(
"Successfully published {}@{}",
manifest.name, manifest.version
)))
}
/// Downloads a published package tarball.
///
/// Looks the package up in the index first (404 if unknown), then streams the
/// `{scope}-{name}-{version}.tar.gz` object from S3 back to the client. A
/// missing S3 object is also reported as 404; other upstream failures become
/// server errors.
#[get("/packages/{author_name}/{package_name}/{version}")]
async fn get(
    app_state: web::Data<AppState>,
    path: web::Path<(String, String, String)>,
) -> Result<impl Responder, errors::Errors> {
    let (scope, name, version) = path.into_inner();

    let package_name = PackageName::new(&scope, &name)?;

    {
        let index = app_state.index.lock().unwrap();

        // `is_none()` rather than `!…is_some()`: identical behavior, but the
        // negated form trips clippy's nonminimal-bool lint, and CI runs
        // clippy with `-D warnings`.
        if index.package(&package_name)?.is_none() {
            return Ok(HttpResponse::NotFound().finish());
        }
    }

    // Pre-signed GET URL; credentials never reach the client.
    let url = app_state
        .s3_bucket
        .get_object(
            Some(&app_state.s3_credentials),
            &*format!("{scope}-{name}-{version}.tar.gz"),
        )
        .sign(S3_EXPIRY);

    let response = match app_state
        .reqwest_client
        .get(url)
        .send()
        .await?
        .error_for_status()
    {
        Ok(response) => response,
        Err(e) => {
            // Translate a missing object into a client-facing 404.
            if let Some(status) = e.status() {
                if status == StatusCode::NOT_FOUND {
                    return Ok(HttpResponse::NotFound().finish());
                }
            }

            return Err(e.into());
        }
    };

    Ok(HttpResponse::Ok().body(response.bytes().await?))
}

/// Registers the package publish (`create`) and download (`get`) routes.
pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(create).service(get);
}

View file

@ -0,0 +1,56 @@
use actix_web::{get, web, Responder};
use serde::Deserialize;
use serde_json::{json, Value};
use crate::{errors, AppState};
#[derive(Deserialize)]
struct Query {
query: String,
}
#[get("/search")]
async fn search(
app_state: web::Data<AppState>,
query: web::Query<Query>,
) -> Result<impl Responder, errors::Errors> {
let searcher = app_state.search_reader.searcher();
let schema = searcher.schema();
let name = schema.get_field("name").unwrap();
let version = schema.get_field("version").unwrap();
let description = schema.get_field("description").unwrap();
let query = query.query.trim();
if query.is_empty() {
return Ok(web::Json(vec![]));
}
let query_parser =
tantivy::query::QueryParser::for_index(&searcher.index(), vec![name, description]);
let query = query_parser.parse_query(&query)?;
let top_docs = searcher
.search(&query, &tantivy::collector::TopDocs::with_limit(10))
.unwrap();
Ok(web::Json(
top_docs
.into_iter()
.map(|(_, doc_address)| {
let retrieved_doc = searcher.doc(doc_address).unwrap();
json!({
"name": retrieved_doc.get_first(name).unwrap().as_text().unwrap(),
"version": retrieved_doc.get_first(version).unwrap().as_text().unwrap(),
"description": retrieved_doc.get_first(description).unwrap().as_text().unwrap(),
})
})
.collect::<Vec<Value>>(),
))
}
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(search);
}

71
registry/src/errors.rs Normal file
View file

@ -0,0 +1,71 @@
use actix_web::{HttpResponse, ResponseError};
use log::error;
use serde::Serialize;
use thiserror::Error;
#[derive(Serialize)]
pub struct ErrorResponse {
pub error: String,
}
#[derive(Debug, Error)]
pub enum Errors {
#[error("io error")]
Io(#[from] std::io::Error),
#[error("user yaml error")]
UserYaml(serde_yaml::Error),
#[error("reqwest error")]
Reqwest(#[from] reqwest::Error),
#[error("package name invalid")]
PackageName(#[from] pesde::package_name::PackageNameValidationError),
#[error("config error")]
Config(#[from] pesde::index::ConfigError),
#[error("create package version error")]
CreatePackageVersion(#[from] pesde::index::CreatePackageVersionError),
#[error("commit and push error")]
CommitAndPush(#[from] pesde::index::CommitAndPushError),
#[error("index package error")]
IndexPackage(#[from] pesde::index::IndexPackageError),
#[error("error parsing query")]
QueryParser(#[from] tantivy::query::QueryParserError),
}
impl ResponseError for Errors {
    /// Maps each error variant to an HTTP response, logging only errors that
    /// indicate a server/registry fault — user-caused errors (bad YAML, bad
    /// package names, bad queries) are returned without logging.
    fn error_response(&self) -> HttpResponse {
        // First pass: decide what to log (and short-circuit the one variant
        // that maps to 401).
        match self {
            Errors::UserYaml(_) | Errors::PackageName(_) | Errors::QueryParser(_) => {}
            Errors::CreatePackageVersion(err) => match err {
                pesde::index::CreatePackageVersionError::MissingScopeOwnership => {
                    return HttpResponse::Unauthorized().json(ErrorResponse {
                        error: "You do not have permission to publish this scope".to_string(),
                    });
                }
                _ => error!("{err:?}"),
            },
            err => {
                error!("{err:?}");
            }
        }

        // Second pass: build the client-facing response. `err` implements
        // Display, so `.to_string()` inside `format!` was redundant
        // (clippy::to_string_in_format_args — fails CI's `-D warnings`).
        match self {
            Errors::UserYaml(err) => HttpResponse::BadRequest().json(ErrorResponse {
                error: format!("Error parsing YAML file: {}", err),
            }),
            Errors::PackageName(err) => HttpResponse::BadRequest().json(ErrorResponse {
                error: format!("Invalid package name: {}", err),
            }),
            Errors::QueryParser(err) => HttpResponse::BadRequest().json(ErrorResponse {
                error: format!("Error parsing query: {}", err),
            }),
            _ => HttpResponse::InternalServerError().finish(),
        }
    }
}

272
registry/src/main.rs Normal file
View file

@ -0,0 +1,272 @@
use std::{fs::read_dir, sync::Mutex, time::Duration};
use actix_cors::Cors;
use actix_web::{
dev::ServiceRequest,
error::ErrorUnauthorized,
middleware::{Compress, Condition},
rt::System,
web, App, Error, HttpMessage, HttpServer,
};
use actix_web_httpauth::{extractors::bearer::BearerAuth, middleware::HttpAuthentication};
use dotenvy::dotenv;
use git2::{Cred, Signature};
use log::info;
use reqwest::{header::AUTHORIZATION, Client};
use rusty_s3::{Bucket, Credentials, UrlStyle};
use tantivy::{doc, IndexReader, IndexWriter};
use pesde::{
index::{GitIndex, IndexFile},
package_name::PackageName,
};
mod endpoints;
mod errors;
const S3_EXPIRY: Duration = Duration::from_secs(60 * 60);
struct AppState {
s3_bucket: Bucket,
s3_credentials: Credentials,
reqwest_client: Client,
index: Mutex<GitIndex>,
search_reader: IndexReader,
search_writer: Mutex<IndexWriter>,
}
/// Reads an environment variable, panicking with a descriptive message when
/// it is missing or (for the `"p"` variants) fails to parse.
///
/// Forms:
/// - `get_env!("NAME")` — required string.
/// - `get_env!("NAME", "p")` — required, parsed via `FromStr`.
/// - `get_env!("NAME", default)` — optional string with a default.
/// - `get_env!("NAME", default, "p")` — optional with default, parsed.
macro_rules! get_env {
    ($name:expr, "p") => {
        std::env::var($name)
            .expect(concat!("Environment variable `", $name, "` must be set"))
            .parse()
            .expect(concat!(
                "Environment variable `",
                $name,
                "` must be a valid value"
            ))
    };
    ($name:expr) => {
        std::env::var($name).expect(concat!("Environment variable `", $name, "` must be set"))
    };
    ($name:expr, $default:expr, "p") => {
        std::env::var($name)
            .unwrap_or($default.to_string())
            .parse()
            .expect(concat!(
                "Environment variable `",
                $name,
                // Fixed panic-message typo: previously "must a valid value".
                "` must be a valid value"
            ))
    };
    ($name:expr, $default:expr) => {
        std::env::var($name).unwrap_or($default.to_string())
    };
}
pub fn commit_signature<'a>() -> Signature<'a> {
Signature::now(
&get_env!("COMMITTER_GIT_NAME"),
&get_env!("COMMITTER_GIT_EMAIL"),
)
.unwrap()
}
#[derive(Debug, Clone)]
pub struct UserId(pub u64);
/// actix-web bearer-auth middleware callback: validates the supplied token
/// against the GitHub API and attaches the authenticated account's numeric
/// id to the request extensions as `UserId`.
///
/// Returns the request on success, or a 401 (`ErrorUnauthorized`) paired
/// with the request when the token is rejected or GitHub is unreachable.
async fn validator(
    req: ServiceRequest,
    credentials: BearerAuth,
) -> Result<ServiceRequest, (Error, ServiceRequest)> {
    let token = credentials.token();
    let app_state = req.app_data::<web::Data<AppState>>().unwrap();

    // Ask GitHub who owns this token; transport failures are treated as
    // authentication failures rather than server errors.
    let Ok(user_info) = app_state
        .reqwest_client
        .get("https://api.github.com/user")
        .header(AUTHORIZATION, format!("Bearer {}", token))
        .send()
        .await
        .map(|r| r.json::<serde_json::Value>())
    else {
        return Err((ErrorUnauthorized("Failed to fetch user info"), req));
    };

    let Ok(user_info) = user_info.await else {
        return Err((ErrorUnauthorized("Failed to parse user info"), req));
    };

    // The numeric GitHub account id is the only identity detail retained.
    let Some(id) = user_info["id"].as_u64() else {
        return Err((ErrorUnauthorized("Failed to fetch user info"), req));
    };

    req.extensions_mut().insert(UserId(id));

    Ok(req)
}
fn search_index(index: &GitIndex) -> (IndexReader, IndexWriter) {
let mut schema_builder = tantivy::schema::SchemaBuilder::new();
let name =
schema_builder.add_text_field("name", tantivy::schema::TEXT | tantivy::schema::STORED);
let version =
schema_builder.add_text_field("version", tantivy::schema::TEXT | tantivy::schema::STORED);
let description = schema_builder.add_text_field(
"description",
tantivy::schema::TEXT | tantivy::schema::STORED,
);
let search_index = tantivy::Index::create_in_ram(schema_builder.build());
let search_reader = search_index
.reader_builder()
.reload_policy(tantivy::ReloadPolicy::OnCommit)
.try_into()
.unwrap();
let mut search_writer = search_index.writer(50_000_000).unwrap();
for entry in read_dir(index.path()).unwrap() {
let entry = entry.unwrap();
let path = entry.path();
if !path.is_dir() || path.file_name().unwrap() == ".git" {
continue;
}
let scope = path.file_name().unwrap().to_str().unwrap();
for entry in read_dir(&path).unwrap() {
let entry = entry.unwrap();
let path = entry.path();
if !path.is_file() || path.extension().is_some() {
continue;
}
let package = path.file_name().unwrap().to_str().unwrap();
let package_name = PackageName::new(scope, package).unwrap();
let entries: IndexFile =
serde_yaml::from_slice(&std::fs::read(&path).unwrap()).unwrap();
let entry = entries
.iter()
.max_by(|a, b| a.version.cmp(&b.version))
.unwrap()
.clone();
search_writer
.add_document(doc!(
name => package_name.to_string(),
version => entry.version.to_string(),
description => entry.description.unwrap_or_default()
))
.unwrap();
}
}
search_writer.commit().unwrap();
(search_reader, search_writer)
}
fn main() -> std::io::Result<()> {
dotenv().ok();
let sentry_url = std::env::var("SENTRY_URL").ok();
let with_sentry = sentry_url.is_some();
let mut log_builder = pretty_env_logger::formatted_builder();
log_builder.parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"));
if with_sentry {
let logger = sentry_log::SentryLogger::with_dest(log_builder.build());
log::set_boxed_logger(Box::new(logger)).unwrap();
log::set_max_level(log::LevelFilter::Info);
} else {
log_builder.try_init().unwrap();
}
let _guard = if let Some(sentry_url) = sentry_url {
std::env::set_var("RUST_BACKTRACE", "1");
Some(sentry::init((
sentry_url,
sentry::ClientOptions {
release: sentry::release_name!(),
..Default::default()
},
)))
} else {
None
};
let address = get_env!("ADDRESS", "127.0.0.1");
let port: u16 = get_env!("PORT", "8080", "p");
let current_dir = std::env::current_dir().unwrap();
let index = GitIndex::new(
current_dir.join("cache"),
&get_env!("INDEX_REPO_URL"),
Some(Box::new(|| {
Box::new(|_, _, _| {
let username = get_env!("GITHUB_USERNAME");
let pat = get_env!("GITHUB_PAT");
Cred::userpass_plaintext(&username, &pat)
})
})),
);
index.refresh().expect("failed to refresh index");
let (search_reader, search_writer) = search_index(&index);
let app_data = web::Data::new(AppState {
s3_bucket: Bucket::new(
get_env!("S3_ENDPOINT", "p"),
UrlStyle::Path,
get_env!("S3_BUCKET_NAME"),
get_env!("S3_REGION"),
)
.unwrap(),
s3_credentials: Credentials::new(get_env!("S3_ACCESS_KEY"), get_env!("S3_SECRET_KEY")),
reqwest_client: Client::builder()
.user_agent(concat!(
env!("CARGO_PKG_NAME"),
"/",
env!("CARGO_PKG_VERSION")
))
.build()
.unwrap(),
index: Mutex::new(index),
search_reader,
search_writer: Mutex::new(search_writer),
});
info!("listening on {address}:{port}");
System::new().block_on(async move {
HttpServer::new(move || {
App::new()
.wrap(Condition::new(with_sentry, sentry_actix::Sentry::new()))
.wrap(Cors::permissive())
.wrap(Compress::default())
.app_data(app_data.clone())
.route("/", web::get().to(|| async { env!("CARGO_PKG_VERSION") }))
.service(
web::scope("/v0")
.configure(endpoints::search::configure)
.service(
web::scope("")
.wrap(HttpAuthentication::with_fn(validator))
.configure(endpoints::configure),
),
)
})
.bind((address, port))?
.run()
.await
})
}

2
rust-toolchain.toml Normal file
View file

@ -0,0 +1,2 @@
[toolchain]
channel = "stable"

110
src/cli/auth.rs Normal file
View file

@ -0,0 +1,110 @@
use clap::Subcommand;
use pesde::index::Index;
use reqwest::{header::AUTHORIZATION, Url};
use crate::{send_request, CliParams};
#[derive(Subcommand)]
pub enum AuthCommand {
/// Logs in to the registry
Login,
/// Logs out from the registry
Logout,
}
pub fn auth_command(cmd: AuthCommand, params: CliParams) -> anyhow::Result<()> {
let index_config = params.index.config()?;
match cmd {
AuthCommand::Login => {
let response = send_request!(params.reqwest_client.post(Url::parse_with_params(
"https://github.com/login/device/code",
&[("client_id", index_config.github_oauth_client_id.as_str())],
)?))
.json::<serde_json::Value>()?;
println!(
"go to {} and enter the code `{}`",
response["verification_uri"], response["user_code"]
);
let mut time_left = response["expires_in"]
.as_i64()
.ok_or(anyhow::anyhow!("couldn't get expires_in"))?;
let interval = std::time::Duration::from_secs(
response["interval"]
.as_u64()
.ok_or(anyhow::anyhow!("couldn't get interval"))?,
);
let device_code = response["device_code"]
.as_str()
.ok_or(anyhow::anyhow!("couldn't get device_code"))?;
while time_left > 0 {
std::thread::sleep(interval);
time_left -= interval.as_secs() as i64;
let response = send_request!(params.reqwest_client.post(Url::parse_with_params(
"https://github.com/login/oauth/access_token",
&[
("client_id", index_config.github_oauth_client_id.as_str()),
("device_code", device_code),
("grant_type", "urn:ietf:params:oauth:grant-type:device_code")
],
)?))
.json::<serde_json::Value>()?;
match response
.get("error")
.map(|s| {
s.as_str()
.ok_or(anyhow::anyhow!("couldn't get error as string"))
})
.unwrap_or(Ok(""))?
{
"authorization_pending" => continue,
"slow_down" => {
std::thread::sleep(std::time::Duration::from_secs(5));
continue;
}
"expired_token" => {
break;
}
"access_denied" => {
anyhow::bail!("access denied, re-run the login command");
}
_ => (),
}
if response.get("access_token").is_some() {
let access_token = response["access_token"]
.as_str()
.ok_or(anyhow::anyhow!("couldn't get access_token"))?;
params.api_token_entry.set_password(access_token)?;
let response = send_request!(params
.reqwest_client
.get("https://api.github.com/user")
.header(AUTHORIZATION, format!("Bearer {access_token}")))
.json::<serde_json::Value>()?;
let login = response["login"]
.as_str()
.ok_or(anyhow::anyhow!("couldn't get login"))?;
println!("you're now logged in as {login}");
return Ok(());
}
}
anyhow::bail!("code expired, please re-run the login command");
}
AuthCommand::Logout => {
params.api_token_entry.delete_password()?;
println!("you're now logged out");
}
}
Ok(())
}

66
src/cli/config.rs Normal file
View file

@ -0,0 +1,66 @@
use std::path::PathBuf;
use clap::Subcommand;
use crate::{CliConfig, CliParams};
#[derive(Subcommand)]
pub enum ConfigCommand {
/// Sets the index repository URL
SetIndexRepo {
#[clap(value_name = "URL")]
url: String,
},
/// Gets the index repository URL
GetIndexRepo,
/// Sets the cache directory
SetCacheDir {
#[clap(value_name = "DIRECTORY")]
directory: Option<PathBuf>,
},
/// Gets the cache directory
GetCacheDir,
}
pub fn config_command(cmd: ConfigCommand, params: CliParams) -> anyhow::Result<()> {
match cmd {
ConfigCommand::SetIndexRepo { url } => {
let cli_config = CliConfig {
index_repo_url: url.clone(),
..params.cli_config
};
cli_config.write(&params.directories)?;
println!("index repository url set to: `{url}`");
}
ConfigCommand::GetIndexRepo => {
println!(
"current index repository url: `{}`",
params.cli_config.index_repo_url
);
}
ConfigCommand::SetCacheDir { directory } => {
let cli_config = CliConfig {
cache_dir: directory,
..params.cli_config
};
cli_config.write(&params.directories)?;
println!(
"cache directory set to: `{}`",
cli_config.cache_dir(&params.directories).display()
);
}
ConfigCommand::GetCacheDir => {
println!(
"current cache directory: `{}`",
params.cli_config.cache_dir(&params.directories).display()
);
}
}
Ok(())
}

3
src/cli/mod.rs Normal file
View file

@ -0,0 +1,3 @@
pub mod auth;
pub mod config;
pub mod root;

281
src/cli/root.rs Normal file
View file

@ -0,0 +1,281 @@
use std::{
fs::{create_dir_all, read, remove_dir_all, write},
time::Duration,
};
use flate2::{write::GzEncoder, Compression};
use futures_executor::block_on;
use ignore::{overrides::OverrideBuilder, WalkBuilder};
use log::debug;
use lune::Runtime;
use reqwest::{header::AUTHORIZATION, Url};
use semver::Version;
use serde_json::Value;
use tar::Builder as TarBuilder;
use pesde::{
dependencies::PackageRef,
index::{GitIndex, Index},
manifest::Manifest,
package_name::PackageName,
patches::{create_patch, setup_patches_repo},
project::{InstallOptions, Project},
DEV_PACKAGES_FOLDER, IGNORED_FOLDERS, PACKAGES_FOLDER, PATCHES_FOLDER, SERVER_PACKAGES_FOLDER,
};
use crate::{send_request, CliParams, Command};
pub const MAX_ARCHIVE_SIZE: usize = 4 * 1024 * 1024;
fn get_project(params: &CliParams) -> anyhow::Result<Project<GitIndex>> {
Project::from_path(
&params.cwd,
params.cli_config.cache_dir(&params.directories),
params.index.clone(),
params.api_token_entry.get_password().ok(),
)
.map_err(Into::into)
}
/// Executes a top-level CLI command.
///
/// Handles `install`, `run`, `search`, `publish`, `patch` and `patch-commit`;
/// any other variant is dispatched by other modules and is unreachable here.
pub fn root_command(cmd: Command, params: CliParams) -> anyhow::Result<()> {
    match cmd {
        Command::Install { locked } => {
            let project = get_project(&params)?;
            // Start from a clean slate: remove every previously installed
            // packages folder (a missing folder is fine).
            for packages_folder in &[PACKAGES_FOLDER, DEV_PACKAGES_FOLDER, SERVER_PACKAGES_FOLDER] {
                if let Err(e) = remove_dir_all(&params.cwd.join(packages_folder)) {
                    if e.kind() != std::io::ErrorKind::NotFound {
                        return Err(e.into());
                    } else {
                        debug!("no {packages_folder} folder found, skipping removal");
                    }
                };
            }
            let resolved_versions_map = project.manifest().dependency_tree(&project, locked)?;
            let download_job = project.download(&resolved_versions_map)?;
            let bar = params.multi.add(
                indicatif::ProgressBar::new(resolved_versions_map.len() as u64)
                    .with_style(indicatif::ProgressStyle::default_bar().template(
                        "{msg} {bar:40.208/166} {pos}/{len} {percent}% {elapsed_precise}",
                    )?)
                    .with_message("Downloading packages"),
            );
            bar.enable_steady_tick(Duration::from_millis(100));
            // One result arrives per package; propagate the first download error.
            while let Ok(result) = download_job.progress().recv() {
                result?;
                bar.inc(1);
            }
            bar.finish_with_message("done");
            // Packages were downloaded above, so disable auto-download here.
            project.install(
                InstallOptions::new()
                    .locked(locked)
                    .auto_download(false)
                    .resolved_versions_map(resolved_versions_map),
            )?;
        }
        Command::Run { package, args } => {
            let project = get_project(&params)?;
            let name: PackageName = package.parse()?;
            let lockfile = project
                .lockfile()?
                .ok_or(anyhow::anyhow!("lockfile not found"))?;
            // Only root (top-level) packages can be run.
            let (_, resolved_pkg) = lockfile
                .get(&name)
                .and_then(|versions| versions.iter().find(|(_, pkg_ref)| pkg_ref.is_root))
                .ok_or(anyhow::anyhow!("package not found in lockfile"))?;
            // Defensive: the `find` above already filtered on `is_root`.
            if !resolved_pkg.is_root {
                anyhow::bail!("package is not a root package");
            }
            let pkg_path = resolved_pkg.directory(project.path()).1;
            let manifest = Manifest::from_path(&pkg_path)?;
            let Some(bin_path) = manifest.exports.bin else {
                anyhow::bail!("no bin found in package");
            };
            let absolute_bin_path = bin_path.to_path(pkg_path);
            let mut runtime = Runtime::new().with_args(args);
            block_on(runtime.run(
                resolved_pkg.pkg_ref.name().to_string(),
                &read(absolute_bin_path)?,
            ))?;
        }
        Command::Search { query } => {
            let config = params.index.config()?;
            let api_url = config.api();
            let response = send_request!(params.reqwest_client.get(Url::parse_with_params(
                &format!("{}/v0/search", api_url),
                &query.map_or_else(Vec::new, |q| vec![("query", q)])
            )?))
            .json::<Value>()?;
            // The response is untrusted external JSON — surface a proper error
            // instead of panicking on an unexpected shape.
            let packages = response
                .as_array()
                .ok_or_else(|| anyhow::anyhow!("unexpected search response: expected an array"))?;
            for package in packages {
                println!(
                    "{}@{}{}",
                    package["name"].as_str().unwrap_or("<unknown>"),
                    package["version"].as_str().unwrap_or("<unknown>"),
                    // A missing or empty description prints nothing extra.
                    package["description"]
                        .as_str()
                        .map(|d| if d.is_empty() {
                            d.to_string()
                        } else {
                            format!("\n{}\n", d)
                        })
                        .unwrap_or_default()
                );
            }
        }
        Command::Publish => {
            let project = get_project(&params)?;
            if project.manifest().private {
                anyhow::bail!("package is private, cannot publish");
            }
            // Build a gzipped tarball of the project, honouring `.pesdeignore`
            // and always excluding installed packages folders.
            let encoder = GzEncoder::new(vec![], Compression::default());
            let mut archive = TarBuilder::new(encoder);
            let mut walk_builder = WalkBuilder::new(&params.cwd);
            walk_builder.add_custom_ignore_filename(".pesdeignore");
            let mut overrides = OverrideBuilder::new(&params.cwd);
            for packages_folder in IGNORED_FOLDERS {
                overrides.add(&format!("!{}", packages_folder))?;
            }
            walk_builder.overrides(overrides.build()?);
            for entry in walk_builder.build() {
                let entry = entry?;
                let path = entry.path();
                let relative_path = path.strip_prefix(&params.cwd)?;
                let entry_type = entry
                    .file_type()
                    .ok_or(anyhow::anyhow!("failed to get file type"))?;
                // Skip the project root itself (its relative path is empty).
                if relative_path.as_os_str().is_empty() {
                    continue;
                }
                if entry_type.is_file() {
                    archive.append_path_with_name(path, relative_path)?;
                } else if entry_type.is_dir() {
                    archive.append_dir(relative_path, path)?;
                }
            }
            let archive = archive.into_inner()?.finish()?;
            if archive.len() > MAX_ARCHIVE_SIZE {
                anyhow::bail!(
                    "archive is too big ({} bytes), max {MAX_ARCHIVE_SIZE}. aborting...",
                    archive.len()
                );
            }
            let part = reqwest::blocking::multipart::Part::bytes(archive)
                .file_name("tarball.tar.gz")
                .mime_str("application/gzip")?;
            let mut request = params
                .reqwest_client
                .post(format!("{}/v0/packages", project.index().config()?.api()))
                .multipart(reqwest::blocking::multipart::Form::new().part("tarball", part));
            if let Some(token) = project.registry_auth_token() {
                request = request.header(AUTHORIZATION, format!("Bearer {token}"));
            } else {
                // NOTE(review): an empty AUTHORIZATION header is sent when no
                // token is configured — confirm the registry expects this.
                request = request.header(AUTHORIZATION, "");
            }
            println!("{}", send_request!(request).text()?);
        }
        Command::Patch { package } => {
            let project = get_project(&params)?;
            let (name, version) = package
                .split_once('@')
                .ok_or(anyhow::anyhow!("Malformed package name"))?;
            let name: PackageName = name.parse()?;
            let version = Version::parse(version)?;
            let lockfile = project
                .lockfile()?
                .ok_or(anyhow::anyhow!("lockfile not found"))?;
            let resolved_pkg = lockfile
                .get(&name)
                .and_then(|versions| versions.get(&version))
                .ok_or(anyhow::anyhow!("package not found in lockfile"))?;
            let dir = params.directories.data_dir().join("patches").join(format!(
                "{}_{}",
                name.escaped(),
                version
            ));
            if dir.exists() {
                anyhow::bail!(
                    "patch already exists. remove the directory {} to create a new patch",
                    dir.display()
                );
            }
            create_dir_all(&dir)?;
            resolved_pkg.pkg_ref.download(&project, &dir)?;
            // Git packages are already a git repository after download; other
            // packages get one so the user's edits can be diffed into a patch.
            match resolved_pkg.pkg_ref {
                PackageRef::Git(_) => {}
                _ => {
                    setup_patches_repo(&dir)?;
                }
            }
            println!("done! modify the files in {} and run `{} patch-commit <DIRECTORY>` to commit the changes", dir.display(), env!("CARGO_BIN_NAME"));
        }
        Command::PatchCommit { dir } => {
            let project = get_project(&params)?;
            let manifest = Manifest::from_path(&dir)?;
            let patch_path = project.path().join(PATCHES_FOLDER).join(format!(
                "{}@{}.patch",
                manifest.name.escaped(),
                manifest.version
            ));
            if patch_path.exists() {
                anyhow::bail!(
                    "patch already exists. remove the file {} to create a new patch",
                    patch_path.display()
                );
            }
            let patches = create_patch(&dir)?;
            write(&patch_path, patches)?;
            // The working copy is no longer needed once the patch is written.
            remove_dir_all(&dir)?;
            println!(
                "done! to apply the patch, run `{} install`",
                env!("CARGO_BIN_NAME")
            );
        }
        // Auth/config commands are handled by their own modules before we get here.
        _ => unreachable!(),
    }
    Ok(())
}

161
src/dependencies/git.rs Normal file
View file

@ -0,0 +1,161 @@
use std::{fs::create_dir_all, path::Path};
use git2::{build::RepoBuilder, Repository};
use log::{debug, warn};
use semver::Version;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{
index::{remote_callbacks, Index},
manifest::{Manifest, ManifestReadError, Realm},
package_name::PackageName,
project::Project,
};
/// A dependency of a package that can be downloaded from a git repository
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
// Reject unrecognized manifest keys so typos fail loudly at parse time.
#[serde(deny_unknown_fields)]
pub struct GitDependencySpecifier {
    /// The URL of the git repository (can be in the form of `owner/repo`, in which case it will default to GitHub)
    pub repo: String,
    /// The revision of the git repository to use
    /// (any revspec accepted by `git rev-parse`: branch, tag, or commit id)
    pub rev: String,
    /// The realm of the package
    pub realm: Option<Realm>,
}
/// A reference to a package that can be downloaded from a git repository
///
/// Unlike `GitDependencySpecifier`, this is fully resolved: `repo_url` is a
/// concrete URL and `rev` is pinned (see `GitDependencySpecifier::resolve`).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct GitPackageRef {
    /// The name of the package
    pub name: PackageName,
    /// The version of the package
    pub version: Version,
    /// The URL of the git repository
    pub repo_url: String,
    /// The revision of the git repository to use
    pub rev: String,
}
/// An error that occurred while downloading a git repository
///
/// Produced by `GitDependencySpecifier::resolve` and `GitPackageRef::download`.
#[derive(Debug, Error)]
pub enum GitDownloadError {
    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while reading the manifest of the git repository
    #[error("error reading manifest")]
    ManifestRead(#[from] ManifestReadError),
}
impl GitDependencySpecifier {
    /// Resolves the specifier against its git repository.
    ///
    /// Clones (or reuses a cached checkout of) the repository, hard-resets it
    /// to `self.rev`, and returns the repo's manifest, the concrete repo URL,
    /// and the resolved commit id.
    pub(crate) fn resolve<I: Index>(
        &self,
        project: &Project<I>,
    ) -> Result<(Manifest, String, String), GitDownloadError> {
        debug!("resolving git dependency {}", self.repo);
        // should also work with ssh urls
        let is_url = self.repo.contains(':');
        // Derive an `owner/repo` style name: URLs keep their last two
        // path segments (minus a `.git` suffix), plain names pass through.
        // NOTE(review): a URL with fewer than two `/`-separated parts makes
        // `parts.len() - 2` underflow and panic — confirm inputs are validated
        // upstream, or guard here.
        let repo_name = {
            if !is_url {
                self.repo.to_string()
            } else {
                let parts: Vec<&str> = self.repo.split('/').collect();
                format!(
                    "{}/{}",
                    parts[parts.len() - 2],
                    parts[parts.len() - 1].trim_end_matches(".git")
                )
            }
        };
        if is_url {
            debug!("resolved git repository name to: {}", &repo_name);
        } else {
            debug!("assuming git repository is a name: {}", &repo_name);
        }
        // A bare `owner/repo` name defaults to GitHub.
        let repo_url = {
            if !is_url {
                format!("https://github.com/{}.git", &self.repo)
            } else {
                self.repo.to_string()
            }
        };
        if is_url {
            debug!("assuming git repository is a url: {}", &repo_url);
        } else {
            debug!("resolved git repository url to: {}", &repo_url);
        }
        // Cache checkouts under <cache>/git/<owner_repo>/<rev>; an existing
        // directory is reused instead of re-cloning.
        let dest = project
            .cache_dir()
            .join("git")
            .join(repo_name.replace('/', "_"))
            .join(&self.rev);
        let repo = if !dest.exists() {
            create_dir_all(&dest)?;
            let mut fetch_options = git2::FetchOptions::new();
            fetch_options.remote_callbacks(remote_callbacks(project.index()));
            RepoBuilder::new()
                .fetch_options(fetch_options)
                .clone(&repo_url, &dest)?
        } else {
            Repository::open(&dest)?
        };
        // Pin the (possibly symbolic) revspec to a concrete object id.
        let obj = repo.revparse_single(&self.rev)?;
        debug!("resolved git revision {} to: {}", self.rev, obj.id());
        repo.reset(&obj, git2::ResetType::Hard, None)?;
        Ok((
            Manifest::from_path(dest)?,
            repo_url.to_string(),
            obj.id().to_string(),
        ))
    }
}
impl GitPackageRef {
    /// Downloads the package to the specified destination
    ///
    /// Clones `self.repo_url` into `dest` and hard-resets the checkout to
    /// `self.rev`, warning if the revision resolves to a different commit id.
    pub fn download<P: AsRef<Path>, I: Index>(
        &self,
        project: &Project<I>,
        dest: P,
    ) -> Result<(), GitDownloadError> {
        let mut fetch_options = git2::FetchOptions::new();
        fetch_options.remote_callbacks(remote_callbacks(project.index()));

        let mut builder = RepoBuilder::new();
        builder.fetch_options(fetch_options);
        let repo = builder.clone(&self.repo_url, dest.as_ref())?;

        let object = repo.revparse_single(&self.rev)?;
        let resolved_id = object.id().to_string();
        if resolved_id != self.rev {
            warn!(
                "git package ref {} resolved to a different revision: {}. this shouldn't happen",
                self.rev,
                object.id()
            );
        }
        repo.reset(&object, git2::ResetType::Hard, None)?;
        Ok(())
    }
}

182
src/dependencies/mod.rs Normal file
View file

@ -0,0 +1,182 @@
use log::debug;
use std::{fmt::Display, fs::create_dir_all, path::Path};
use semver::Version;
use serde::{de::IntoDeserializer, Deserialize, Deserializer, Serialize};
use thiserror::Error;
use crate::{
dependencies::{
git::{GitDependencySpecifier, GitPackageRef},
registry::{RegistryDependencySpecifier, RegistryPackageRef},
resolution::ResolvedVersionsMap,
},
index::Index,
manifest::Realm,
multithread::MultithreadedJob,
package_name::PackageName,
project::{InstallProjectError, Project},
};
/// Git dependency related stuff
pub mod git;
/// Registry dependency related stuff
pub mod registry;
/// Resolution
pub mod resolution;
// To improve developer experience, we resolve the type of the dependency specifier with a custom deserializer, so that the user doesn't have to specify the type of the dependency
/// A dependency of a package
// `untagged` applies to serialization only; deserialization goes through the
// custom `Deserialize` impl below, which keys off the `repo`/`name` fields.
#[derive(Serialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(untagged)]
pub enum DependencySpecifier {
    /// A dependency that can be downloaded from a registry
    Registry(RegistryDependencySpecifier),
    /// A dependency that can be downloaded from a git repository
    Git(GitDependencySpecifier),
}
impl DependencySpecifier {
/// Gets the name (or repository) of the specifier
pub fn name(&self) -> String {
match self {
DependencySpecifier::Registry(registry) => registry.name.to_string(),
DependencySpecifier::Git(git) => git.repo.to_string(),
}
}
/// Gets the version (or revision) of the specifier
pub fn version(&self) -> String {
match self {
DependencySpecifier::Registry(registry) => registry.version.to_string(),
DependencySpecifier::Git(git) => git.rev.clone(),
}
}
/// Gets the realm of the specifier
pub fn realm(&self) -> Option<&Realm> {
match self {
DependencySpecifier::Registry(registry) => registry.realm.as_ref(),
DependencySpecifier::Git(git) => git.realm.as_ref(),
}
}
}
impl<'de> Deserialize<'de> for DependencySpecifier {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let yaml = serde_yaml::Value::deserialize(deserializer)?;
let result = if yaml.get("repo").is_some() {
GitDependencySpecifier::deserialize(yaml.into_deserializer())
.map(DependencySpecifier::Git)
} else if yaml.get("name").is_some() {
RegistryDependencySpecifier::deserialize(yaml.into_deserializer())
.map(DependencySpecifier::Registry)
} else {
Err(serde::de::Error::custom("invalid dependency"))
};
result.map_err(|e| serde::de::Error::custom(e.to_string()))
}
}
// Here we don't use a custom deserializer, because this is exposed to the user only from the lock file, which mustn't be edited manually anyway
/// A reference to a package
// Internally tagged with a `type` field so lockfile entries stay readable.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum PackageRef {
    /// A reference to a package that can be downloaded from a registry
    Registry(RegistryPackageRef),
    /// A reference to a package that can be downloaded from a git repository
    Git(GitPackageRef),
}
/// An error that occurred while downloading a package
///
/// Each variant carries the failing `PackageRef` (boxed to keep the enum small).
#[derive(Debug, Error)]
pub enum DownloadError {
    /// An error that occurred while downloading a package from a registry
    #[error("error downloading package {1} from registry")]
    Registry(#[source] registry::RegistryDownloadError, Box<PackageRef>),
    /// An error that occurred while downloading a package from a git repository
    #[error("error downloading package {1} from git repository")]
    Git(#[source] git::GitDownloadError, Box<PackageRef>),
}
impl PackageRef {
    /// Gets the name of the package
    pub fn name(&self) -> &PackageName {
        match self {
            Self::Registry(pkg) => &pkg.name,
            Self::Git(pkg) => &pkg.name,
        }
    }

    /// Gets the version of the package
    pub fn version(&self) -> &Version {
        match self {
            Self::Registry(pkg) => &pkg.version,
            Self::Git(pkg) => &pkg.version,
        }
    }

    /// Downloads the package to the specified destination
    ///
    /// Wraps any source-specific error in `DownloadError` together with a
    /// clone of this reference for context.
    pub fn download<P: AsRef<Path>, I: Index>(
        &self,
        project: &Project<I>,
        dest: P,
    ) -> Result<(), DownloadError> {
        match self {
            Self::Registry(pkg) => {
                let result = pkg.download(project, dest);
                result.map_err(|e| DownloadError::Registry(e, Box::new(self.clone())))
            }
            Self::Git(pkg) => {
                let result = pkg.download(project, dest);
                result.map_err(|e| DownloadError::Git(e, Box::new(self.clone())))
            }
        }
    }
}
impl<I: Index> Project<I> {
    /// Downloads the project's dependencies
    ///
    /// Spawns one task per not-yet-downloaded resolved package on the job's
    /// thread pool and returns immediately; one result per package arrives on
    /// the job's progress channel.
    pub fn download(
        &self,
        map: &ResolvedVersionsMap,
    ) -> Result<MultithreadedJob<DownloadError>, InstallProjectError> {
        let (job, tx) = MultithreadedJob::new();
        for (name, versions) in map.clone() {
            for (version, resolved_package) in versions {
                let (_, source) = resolved_package.directory(self.path());
                // An existing directory means the package was downloaded before.
                if source.exists() {
                    debug!("package {name}@{version} already downloaded, skipping...");
                    continue;
                }
                debug!(
                    "downloading package {name}@{version} to {}",
                    source.display()
                );
                create_dir_all(&source)?;
                // Each worker owns its own clone of the project and the sender.
                let project = self.clone();
                let tx = tx.clone();
                job.pool.execute(move || {
                    let result = resolved_package.pkg_ref.download(&project, source);
                    tx.send(result).unwrap();
                });
            }
        }
        Ok(job)
    }
}
impl Display for PackageRef {
    /// Formats as `name@version`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_fmt(format_args!("{}@{}", self.name(), self.version()))
    }
}

View file

@ -0,0 +1,125 @@
use std::path::Path;
use log::{debug, error};
use reqwest::header::{AUTHORIZATION, USER_AGENT as USER_AGENT_HEADER};
use semver::{Version, VersionReq};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{
index::Index, manifest::Realm, package_name::PackageName, project::Project, USER_AGENT,
};
/// A dependency of a package that can be downloaded from a registry
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
// Reject unrecognized manifest keys so typos fail loudly at parse time.
#[serde(deny_unknown_fields)]
pub struct RegistryDependencySpecifier {
    /// The name of the package
    pub name: PackageName,
    /// The version requirement of the package
    pub version: VersionReq,
    // TODO: support per-package registries
    // #[serde(skip_serializing_if = "Option::is_none")]
    // pub registry: Option<String>,
    /// The realm of the package
    pub realm: Option<Realm>,
}
/// A reference to a package that can be downloaded from a registry
///
/// Unlike the specifier, the version here is fully pinned.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct RegistryPackageRef {
    /// The name of the package
    pub name: PackageName,
    /// The version of the package
    pub version: Version,
    // TODO: support per-package registries
    // #[serde(skip_serializing_if = "Option::is_none")]
    // pub index_url: Option<String>,
}
/// An error that occurred while downloading a package from a registry
///
/// Produced by `RegistryPackageRef::download`.
#[derive(Debug, Error)]
pub enum RegistryDownloadError {
    /// An error that occurred while interacting with reqwest
    #[error("error interacting with reqwest")]
    Reqwest(#[from] reqwest::Error),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while reading the index config
    #[error("error with the index config")]
    IndexConfig(#[from] crate::index::ConfigError),
    /// The package was not found on the registry
    #[error("package {0} not found on the registry, but found in the index")]
    NotFound(PackageName),
    /// The user is unauthorized to download the package
    #[error("unauthorized to download package {0}")]
    Unauthorized(PackageName),
    /// An HTTP error occurred
    #[error("http error {0}: the server responded with {1}")]
    Http(reqwest::StatusCode, String),
}
impl RegistryPackageRef {
    /// Downloads the package to the specified destination
    ///
    /// Resolves the download URL from the index config's template, performs an
    /// (optionally authenticated) GET, and unpacks the returned gzipped
    /// tarball into `dest`.
    pub fn download<P: AsRef<Path>, I: Index>(
        &self,
        project: &Project<I>,
        dest: P,
    ) -> Result<(), RegistryDownloadError> {
        // The index config supplies a URL template with these placeholders.
        let url = project
            .index()
            .config()?
            .download()
            .replace("{PACKAGE_AUTHOR}", self.name.scope())
            .replace("{PACKAGE_NAME}", self.name.name())
            .replace("{PACKAGE_VERSION}", &self.version.to_string());
        debug!(
            "downloading registry package {}@{} from {}",
            self.name, self.version, url
        );
        let client = reqwest::blocking::Client::new();
        let response = {
            let mut builder = client.get(&url).header(USER_AGENT_HEADER, USER_AGENT);
            if let Some(token) = project.registry_auth_token() {
                // Log a redacted form of the token (first 8 chars visible).
                // `saturating_sub` avoids the usize-underflow panic the old
                // `token.len() - 8` hit on tokens shorter than 8 bytes.
                let visible_tokens = token.chars().take(8).collect::<String>();
                let hidden_tokens = "*".repeat(token.len().saturating_sub(8));
                debug!("using registry token {visible_tokens}{hidden_tokens}");
                builder = builder.header(AUTHORIZATION, format!("Bearer {}", token));
            }
            builder.send()?
        };
        if !response.status().is_success() {
            return match response.status() {
                reqwest::StatusCode::NOT_FOUND => {
                    Err(RegistryDownloadError::NotFound(self.name.clone()))
                }
                reqwest::StatusCode::UNAUTHORIZED => {
                    Err(RegistryDownloadError::Unauthorized(self.name.clone()))
                }
                _ => Err(RegistryDownloadError::Http(
                    response.status(),
                    response.text()?,
                )),
            };
        }
        // Unpack the gzipped tarball straight from the response body.
        let bytes = response.bytes()?;
        let mut decoder = flate2::read::GzDecoder::new(bytes.as_ref());
        let mut archive = tar::Archive::new(&mut decoder);
        archive.unpack(&dest)?;
        Ok(())
    }
}

View file

@ -0,0 +1,442 @@
use std::{
collections::{BTreeMap, BTreeSet, VecDeque},
fmt::Display,
path::{Path, PathBuf},
};
use log::debug;
use semver::Version;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{
dependencies::{
git::{GitDownloadError, GitPackageRef},
registry::{RegistryDependencySpecifier, RegistryPackageRef},
DependencySpecifier, PackageRef,
},
index::{Index, IndexPackageError},
manifest::{DependencyType, Manifest, Realm},
package_name::PackageName,
project::{Project, ReadLockfileError},
DEV_PACKAGES_FOLDER, INDEX_FOLDER, PACKAGES_FOLDER, SERVER_PACKAGES_FOLDER,
};
/// A node in the dependency tree
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct ResolvedPackage {
    /// The reference to the package
    pub pkg_ref: PackageRef,
    /// The specifier that resolved to this package
    pub specifier: DependencySpecifier,
    /// The dependencies of the package
    /// (edges into the enclosing `ResolvedVersionsMap`, keyed by name+version)
    #[serde(default, skip_serializing_if = "BTreeSet::is_empty")]
    pub dependencies: BTreeSet<(PackageName, Version)>,
    /// Whether the package is a root package (top-level dependency)
    pub is_root: bool,
    /// The realm of the package
    pub realm: Realm,
    /// The type of the dependency
    pub dep_type: DependencyType,
}
impl Display for ResolvedPackage {
    /// Delegates to the underlying package reference (`name@version`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.pkg_ref.fmt(f)
    }
}
/// Maps a realm to the folder its packages get installed into.
pub(crate) fn packages_folder(realm: &Realm) -> &str {
    match *realm {
        Realm::Shared => PACKAGES_FOLDER,
        Realm::Server => SERVER_PACKAGES_FOLDER,
        Realm::Development => DEV_PACKAGES_FOLDER,
    }
}
impl ResolvedPackage {
    /// Folder this package installs into, derived from its realm.
    pub(crate) fn packages_folder(&self) -> &str {
        packages_folder(&self.realm)
    }

    /// Returns the directory of the package in the project, and the parent of the directory
    pub fn directory<P: AsRef<Path>>(&self, project_path: P) -> (PathBuf, PathBuf) {
        let name = self.pkg_ref.name().escaped();
        let version = self.pkg_ref.version().to_string();
        // <project>/<packages folder>/<index folder>/<escaped name>/<version>
        let mut container_path = project_path.as_ref().to_path_buf();
        container_path.push(self.packages_folder());
        container_path.push(INDEX_FOLDER);
        container_path.push(&name);
        container_path.push(version);
        // The package's own contents live in a folder named after it,
        // inside the versioned container.
        let package_path = container_path.join(&name);
        (container_path, package_path)
    }
}
/// A flat resolved map, a map of package names to versions to resolved packages
///
/// Multiple versions of one package may coexist; the lockfile is read back
/// into this same shape (see `add_to_map`).
pub type ResolvedVersionsMap = BTreeMap<PackageName, BTreeMap<Version, ResolvedPackage>>;
// Selects, from an iterator over `&Version`, the highest version that
// satisfies `$dep.version` (a `VersionReq`); yields `None` when nothing
// matches. The result is cloned out of the iterator.
macro_rules! find_highest {
    ($iter:expr, $dep:expr) => {
        $iter
            .filter(|v| $dep.version.matches(v))
            .max_by(|a, b| a.cmp(&b))
            .cloned()
    };
}
/// Combines two realms: identical realms are kept as-is, differing realms
/// fall back to `Realm::Shared`.
fn find_realm(a: &Realm, b: &Realm) -> Realm {
    if a == b {
        *a
    } else {
        Realm::Shared
    }
}
/// Copies `resolved_package` (and, recursively, everything it depends on)
/// from the lockfile into `map`.
///
/// `depth` is only used to indent the debug output. Fails with
/// `OutOfDateLockfile` when a recorded dependency edge points at an entry the
/// lockfile does not contain (i.e. the lockfile is malformed).
fn add_to_map(
    map: &mut ResolvedVersionsMap,
    name: &PackageName,
    version: &Version,
    resolved_package: &ResolvedPackage,
    lockfile: &ResolvedVersionsMap,
    depth: usize,
) -> Result<(), ResolveError> {
    debug!(
        "{}resolved {resolved_package} from lockfile",
        "\t".repeat(depth)
    );
    map.entry(name.clone())
        .or_default()
        .insert(version.clone(), resolved_package.clone());
    for (dep_name, dep_version) in &resolved_package.dependencies {
        // Only recurse into edges we haven't copied yet.
        if map.get(dep_name).and_then(|v| v.get(dep_version)).is_none() {
            let dep = lockfile.get(dep_name).and_then(|v| v.get(dep_version));
            match dep {
                Some(dep) => add_to_map(map, dep_name, dep_version, dep, lockfile, depth + 1)?,
                // the lockfile is malformed
                None => return Err(ResolveError::OutOfDateLockfile),
            }
        }
    }
    Ok(())
}
/// An error that occurred while resolving dependencies
///
/// Produced by `Manifest::dependency_tree` and its helpers.
#[derive(Debug, Error)]
pub enum ResolveError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred because a registry dependency conflicts with a git dependency
    #[error("registry dependency {0}@{1} conflicts with git dependency")]
    RegistryConflict(PackageName, Version),
    /// An error that occurred because a git dependency conflicts with a registry dependency
    #[error("git dependency {0}@{1} conflicts with registry dependency")]
    GitConflict(PackageName, Version),
    /// An error that occurred because no satisfying version was found for a dependency
    #[error("no satisfying version found for dependency {0:?}")]
    NoSatisfyingVersion(RegistryDependencySpecifier),
    /// An error that occurred while downloading a package from a git repository
    #[error("error downloading git package")]
    GitDownload(#[from] GitDownloadError),
    /// An error that occurred because a package was not found in the index
    #[error("package {0} not found in index")]
    PackageNotFound(PackageName),
    /// An error that occurred while getting a package from the index
    #[error("failed to get package {1} from index")]
    IndexPackage(#[source] IndexPackageError, PackageName),
    /// An error that occurred while reading the lockfile
    #[error("failed to read lockfile")]
    LockfileRead(#[from] ReadLockfileError),
    /// An error that occurred because the lockfile is out of date
    /// (a locked install found new dependencies, or an edge is missing)
    #[error("out of date lockfile")]
    OutOfDateLockfile,
    /// An error that occurred because two realms are incompatible
    #[error("incompatible realms for package {0} (package specified {1}, user specified {2})")]
    IncompatibleRealms(PackageName, Realm, Realm),
    /// An error that occurred because a peer dependency is not installed
    #[error("peer dependency {0}@{1} is not installed")]
    PeerNotInstalled(PackageName, Version),
}
impl Manifest {
    /// Resolves the dependency tree for the project
    ///
    /// Resolution happens in three stages:
    /// 1. reuse entries from the lockfile (erroring in `locked` mode when the
    ///    manifest has dependencies the lockfile doesn't cover);
    /// 2. BFS over the remaining dependencies, resolving each specifier via
    ///    the index (registry) or by cloning (git);
    /// 3. a final pass that rejects uninstalled peer dependencies and
    ///    reconciles realms across edges.
    pub fn dependency_tree<I: Index>(
        &self,
        project: &Project<I>,
        locked: bool,
    ) -> Result<ResolvedVersionsMap, ResolveError> {
        debug!("resolving dependency tree for project {}", self.name);
        // try to reuse versions (according to semver specifiers) to decrease the amount of downloads and storage
        let mut resolved_versions_map: ResolvedVersionsMap = BTreeMap::new();
        let tree = if let Some(lockfile) = project.lockfile()? {
            debug!("lockfile found, resolving dependencies from it");
            let mut missing = Vec::new();
            // resolve all root dependencies (and their dependencies) from the lockfile
            for (name, versions) in &lockfile {
                for (version, resolved_package) in versions {
                    // Only root entries whose specifier still appears in the
                    // manifest are copied over; everything else is re-resolved.
                    if !resolved_package.is_root
                        || !self
                            .dependencies()
                            .into_iter()
                            .any(|(spec, _)| spec == resolved_package.specifier)
                    {
                        continue;
                    }
                    add_to_map(
                        &mut resolved_versions_map,
                        name,
                        version,
                        resolved_package,
                        &lockfile,
                        1,
                    )?;
                }
            }
            // resolve new, or modified, dependencies from the lockfile
            'outer: for (dep, dep_type) in self.dependencies() {
                for versions in resolved_versions_map.values() {
                    for resolved_package in versions.values() {
                        if resolved_package.specifier == dep && resolved_package.is_root {
                            continue 'outer;
                        }
                    }
                }
                // A locked install must not need anything beyond the lockfile.
                if locked {
                    return Err(ResolveError::OutOfDateLockfile);
                }
                missing.push((dep.clone(), dep_type));
            }
            debug!(
                "resolved {} dependencies from lockfile. new dependencies: {}",
                resolved_versions_map.len(),
                missing.len()
            );
            missing
        } else {
            debug!("no lockfile found, resolving all dependencies");
            self.dependencies()
        };
        if tree.is_empty() {
            debug!("no dependencies left to resolve, finishing...");
            return Ok(resolved_versions_map);
        }
        debug!("resolving {} dependencies from index", tree.len());
        // BFS queue of (specifier, dependant): `None` dependant = root dep.
        // NOTE(review): the queue is seeded from `self.dependencies()`, not
        // from `tree` (the missing subset computed above) — lockfile-resolved
        // deps are walked again (the `previously_resolved` check below makes
        // this mostly a no-op). Confirm whether `tree` was intended here.
        let mut queue = VecDeque::from_iter(self.dependencies().into_iter().map(|d| (d, None)));
        while let Some(((specifier, dep_type), dependant)) = queue.pop_front() {
            let (pkg_ref, default_realm, dependencies) = match &specifier {
                DependencySpecifier::Registry(registry_dependency) => {
                    let index_entries = project
                        .index()
                        .package(&registry_dependency.name)
                        .map_err(|e| {
                            ResolveError::IndexPackage(e, registry_dependency.name.clone())
                        })?
                        .ok_or_else(|| {
                            ResolveError::PackageNotFound(registry_dependency.name.clone())
                        })?;
                    let resolved_versions = resolved_versions_map
                        .entry(registry_dependency.name.clone())
                        .or_default();
                    // try to find the highest already downloaded version that satisfies the requirement, otherwise find the highest satisfying version in the index
                    let Some(version) =
                        find_highest!(resolved_versions.keys(), registry_dependency).or_else(
                            || {
                                find_highest!(
                                    index_entries.iter().map(|v| &v.version),
                                    registry_dependency
                                )
                            },
                        )
                    else {
                        return Err(ResolveError::NoSatisfyingVersion(
                            registry_dependency.clone(),
                        ));
                    };
                    let entry = index_entries
                        .into_iter()
                        .find(|e| e.version.eq(&version))
                        .unwrap();
                    debug!(
                        "resolved registry dependency {} to {}",
                        registry_dependency.name, version
                    );
                    (
                        PackageRef::Registry(RegistryPackageRef {
                            name: registry_dependency.name.clone(),
                            version: version.clone(),
                        }),
                        entry.realm,
                        entry.dependencies,
                    )
                }
                DependencySpecifier::Git(git_dependency) => {
                    // Git deps pin to a concrete commit; name/version/realm
                    // come from the cloned repo's own manifest.
                    let (manifest, url, rev) = git_dependency.resolve(project)?;
                    debug!(
                        "resolved git dependency {} to {url}#{rev}",
                        git_dependency.repo
                    );
                    (
                        PackageRef::Git(GitPackageRef {
                            name: manifest.name.clone(),
                            version: manifest.version.clone(),
                            repo_url: url,
                            rev,
                        }),
                        manifest.realm,
                        manifest.dependencies(),
                    )
                }
            };
            let is_root = dependant.is_none();
            // if the dependency is a root dependency, it can be thought of as a normal dependency
            let dep_type = if is_root {
                DependencyType::Normal
            } else {
                dep_type
            };
            // Record the edge from the dependant to this package.
            if let Some((dependant_name, dependant_version)) = dependant {
                resolved_versions_map
                    .get_mut(&dependant_name)
                    .and_then(|v| v.get_mut(&dependant_version))
                    .unwrap()
                    .dependencies
                    .insert((pkg_ref.name().clone(), pkg_ref.version().clone()));
            }
            let resolved_versions = resolved_versions_map
                .entry(pkg_ref.name().clone())
                .or_default();
            if let Some(previously_resolved) = resolved_versions.get_mut(pkg_ref.version()) {
                // The same name@version must not come from two different
                // source kinds.
                match (&pkg_ref, &previously_resolved.pkg_ref) {
                    (PackageRef::Registry(r), PackageRef::Git(_g)) => {
                        return Err(ResolveError::RegistryConflict(
                            r.name.clone(),
                            r.version.clone(),
                        ));
                    }
                    (PackageRef::Git(g), PackageRef::Registry(_r)) => {
                        return Err(ResolveError::GitConflict(g.name.clone(), g.version.clone()));
                    }
                    _ => (),
                }
                // A normal dependency satisfies (and upgrades) a peer entry.
                if previously_resolved.dep_type == DependencyType::Peer
                    && dep_type == DependencyType::Normal
                {
                    previously_resolved.dep_type = dep_type;
                }
                // need not resolve the package again
                continue;
            }
            // A user-requested Shared realm can't be honoured for a package
            // that declares itself Server-only.
            if specifier
                .realm()
                .is_some_and(|realm| realm == &Realm::Shared)
                && default_realm.is_some_and(|realm| realm == Realm::Server)
            {
                return Err(ResolveError::IncompatibleRealms(
                    pkg_ref.name().clone(),
                    default_realm.unwrap(),
                    *specifier.realm().unwrap(),
                ));
            }
            resolved_versions.insert(
                pkg_ref.version().clone(),
                ResolvedPackage {
                    pkg_ref: pkg_ref.clone(),
                    specifier: specifier.clone(),
                    dependencies: BTreeSet::new(),
                    is_root,
                    // The user-specified realm wins; otherwise the package's
                    // own (default) realm.
                    realm: *specifier
                        .realm()
                        .copied()
                        .unwrap_or_default()
                        .or(&default_realm.unwrap_or_default()),
                    dep_type,
                },
            );
            // Enqueue transitive dependencies with this package as dependant.
            for dependency in dependencies {
                queue.push_back((
                    dependency,
                    Some((pkg_ref.name().clone(), pkg_ref.version().clone())),
                ));
            }
        }
        debug!("resolving realms and peer dependencies...");
        for (name, versions) in resolved_versions_map.clone() {
            for (version, resolved_package) in versions {
                // Any entry still marked Peer was never satisfied by a normal
                // dependency (see the upgrade above).
                if resolved_package.dep_type == DependencyType::Peer {
                    return Err(ResolveError::PeerNotInstalled(
                        resolved_package.pkg_ref.name().clone(),
                        resolved_package.pkg_ref.version().clone(),
                    ));
                }
                // Reconcile this package's realm with its dependencies'
                // realms (differing realms fall back to Shared).
                let mut realm = resolved_package.realm;
                for (dep_name, dep_version) in &resolved_package.dependencies {
                    let dep = resolved_versions_map
                        .get(dep_name)
                        .and_then(|v| v.get(dep_version));
                    if let Some(dep) = dep {
                        realm = find_realm(&realm, &dep.realm);
                    }
                }
                resolved_versions_map
                    .get_mut(&name)
                    .and_then(|v| v.get_mut(&version))
                    .unwrap()
                    .realm = realm;
            }
        }
        debug!("finished resolving dependency tree");
        Ok(resolved_versions_map)
    }
}

558
src/index.rs Normal file
View file

@ -0,0 +1,558 @@
use std::{
collections::BTreeSet,
fmt::Debug,
fs::create_dir_all,
hash::Hash,
path::{Path, PathBuf},
sync::Arc,
};
use git2::{build::RepoBuilder, Remote, Repository, Signature};
use log::debug;
use semver::Version;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{
dependencies::DependencySpecifier,
manifest::{DependencyType, Manifest, Realm},
package_name::PackageName,
};
/// Owners of a scope
///
/// A sorted set of numeric user ids — presumably the registry's account ids;
/// TODO(review): confirm against the registry's auth layer.
pub type ScopeOwners = BTreeSet<u64>;
/// A packages index
///
/// Abstracts over index backends (`GitIndex` below is the concrete one).
/// Implementors must be cheaply cloneable and shareable across threads, since
/// package downloads clone the project — and with it the index.
pub trait Index: Send + Sync + Debug + Clone + 'static {
    /// Gets the owners of a scope
    /// (presumably `Ok(None)` when the scope does not exist — confirm with implementors)
    fn scope_owners(&self, scope: &str) -> Result<Option<ScopeOwners>, ScopeOwnersError>;
    /// Creates a scope
    /// (the returned bool presumably signals whether the scope was newly created)
    fn create_scope_for(
        &mut self,
        scope: &str,
        owners: &ScopeOwners,
    ) -> Result<bool, ScopeOwnersError>;
    /// Gets a package from the index
    fn package(&self, name: &PackageName) -> Result<Option<IndexFile>, IndexPackageError>;
    /// Creates a package version
    /// (the returned bool presumably signals whether the version was newly created)
    fn create_package_version(
        &mut self,
        manifest: &Manifest,
        uploader: &u64,
    ) -> Result<bool, CreatePackageVersionError>;
    /// Gets the index's configuration
    fn config(&self) -> Result<IndexConfig, ConfigError>;
    /// Returns a function that gets the credentials for a git repository
    fn credentials_fn(&self) -> Option<&Arc<CredentialsFn>>;
}
/// A function that gets the credentials for a git repository
///
/// The outer closure is a factory: each call produces a fresh `git2`-style
/// credentials callback (git2 may invoke that callback repeatedly during one
/// operation).
pub type CredentialsFn = Box<
    dyn Fn() -> Box<
            dyn FnMut(&str, Option<&str>, git2::CredentialType) -> Result<git2::Cred, git2::Error>,
        > + Send
        + Sync,
>;
/// The packages index
///
/// A git-repository-backed implementation of [`Index`].
#[derive(Clone)]
pub struct GitIndex {
    // Local path of the index checkout — presumably a clone of `repo_url`;
    // TODO(review): confirm against the impl further down this file.
    path: PathBuf,
    // Remote URL of the index repository.
    repo_url: String,
    // Optional factory for git credential callbacks, shared across clones.
    pub(crate) credentials_fn: Option<Arc<CredentialsFn>>,
}
impl Debug for GitIndex {
    // Manual impl because `credentials_fn` holds a closure and cannot derive
    // `Debug`; it is intentionally omitted from the output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("GitIndex")
            .field("path", &self.path)
            .field("repo_url", &self.repo_url)
            .finish()
    }
}
impl Hash for GitIndex {
    // Hash only `path` and `repo_url`, mirroring `PartialEq` below so that
    // equal values hash equally; `credentials_fn` is excluded.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.path.hash(state);
        self.repo_url.hash(state);
    }
}
impl PartialEq for GitIndex {
    // Equality ignores `credentials_fn` (closures are not comparable).
    fn eq(&self, other: &Self) -> bool {
        self.path == other.path && self.repo_url == other.repo_url
    }
}
// `eq` is a full equivalence over the two compared fields, so `Eq` holds.
impl Eq for GitIndex {}
/// An error that occurred while getting the index's refspec
#[derive(Debug, Error)]
pub enum GetRefSpecError {
    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),
    /// The refspec for the upstream branch was not found
    /// (carries the upstream branch name no fetch refspec matched)
    #[error("refspec not found for upstream branch {0}")]
    RefSpecNotFound(String),
    /// The refspec is not utf-8
    #[error("refspec not utf-8")]
    RefSpecNotUtf8,
    /// The upstream branch was not found
    #[error("upstream branch not found")]
    UpstreamBranchNotFound,
    /// The upstream branch is not utf-8
    #[error("upstream branch not utf-8")]
    UpstreamBranchNotUtf8,
}
/// An error that occurred while refreshing the index
/// (see `GitIndex::refresh` — covers both the clone and fetch/reset paths)
#[derive(Debug, Error)]
pub enum RefreshError {
    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while getting the index's refspec
    #[error("error getting refspec")]
    GetRefSpec(#[from] GetRefSpecError),
}
/// An error that occurred while interacting with the scope owners
#[derive(Debug, Error)]
pub enum ScopeOwnersError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while deserializing the scope owners
    // NOTE(review): `create_scope_for` also maps *serialization* failures to
    // this variant, so its message can be misleading in that path.
    #[error("error deserializing scope owners")]
    ScopeOwnersDeser(#[source] serde_yaml::Error),
    /// An error that occurred while committing and pushing to the index
    #[error("error committing and pushing to the index")]
    CommitAndPush(#[from] CommitAndPushError),
}
/// An error that occurred while committing and pushing to the index
/// (see `GitIndex::commit_and_push`)
#[derive(Debug, Error)]
pub enum CommitAndPushError {
    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while getting the index's refspec
    #[error("error getting refspec")]
    GetRefSpec(#[from] GetRefSpecError),
}
/// An error that occurred while getting a package from the index
#[derive(Debug, Error)]
pub enum IndexPackageError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while deserializing the index file
    /// (the per-package YAML file listing its published versions)
    #[error("error deserializing index file")]
    FileDeser(#[source] serde_yaml::Error),
}
/// An error that occurred while creating a package version
#[derive(Debug, Error)]
pub enum CreatePackageVersionError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while getting a package from the index
    #[error("error getting a package from the index")]
    IndexPackage(#[from] IndexPackageError),
    /// An error that occurred while serializing the index file
    #[error("error serializing index file")]
    FileSer(#[source] serde_yaml::Error),
    /// An error that occurred while committing and pushing to the index
    #[error("error committing and pushing to the index")]
    CommitAndPush(#[from] CommitAndPushError),
    /// An error that occurred while interacting with the scope owners
    #[error("error interacting with the scope owners")]
    ScopeOwners(#[from] ScopeOwnersError),
    /// The scope is missing ownership
    /// (the uploader is not among the scope's owners)
    #[error("missing scope ownership")]
    MissingScopeOwnership,
}
/// An error that occurred while getting the index's configuration
#[derive(Debug, Error)]
pub enum ConfigError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while deserializing the index config
    #[error("error deserializing index config")]
    ConfigDeser(#[source] serde_yaml::Error),
    /// The index does not have a config file (`config.yaml` at its root)
    #[error("index does not have a config file - this is an issue with the index, please contact the maintainer of the index")]
    MissingConfig,
}
/// Returns `(refspec, upstream_branch)` for the current HEAD's upstream on
/// the given remote — the source ref to fetch/push and the remote-tracking
/// ref name (e.g. `refs/remotes/origin/main`).
fn get_refspec(
    repo: &Repository,
    remote: &mut Remote,
) -> Result<(String, String), GetRefSpecError> {
    // Resolve the remote-tracking ref of the branch HEAD points at.
    let upstream_branch_buf = repo.branch_upstream_name(
        repo.head()?
            .name()
            .ok_or(GetRefSpecError::UpstreamBranchNotFound)?,
    )?;
    let upstream_branch = upstream_branch_buf
        .as_str()
        .ok_or(GetRefSpecError::UpstreamBranchNotUtf8)?;
    // Find the fetch refspec whose destination covers that upstream ref, then
    // reverse-transform it back to the corresponding source ref on the remote.
    let refspec_buf = remote
        .refspecs()
        .find(|r| r.direction() == git2::Direction::Fetch && r.dst_matches(upstream_branch))
        .ok_or(GetRefSpecError::RefSpecNotFound(
            upstream_branch.to_string(),
        ))?
        .rtransform(upstream_branch)?;
    let refspec = refspec_buf
        .as_str()
        .ok_or(GetRefSpecError::RefSpecNotUtf8)?;
    Ok((refspec.to_string(), upstream_branch.to_string()))
}
pub(crate) fn remote_callbacks<I: Index>(index: &I) -> git2::RemoteCallbacks {
let mut remote_callbacks = git2::RemoteCallbacks::new();
if let Some(credentials) = &index.credentials_fn() {
let credentials = std::sync::Arc::clone(credentials);
remote_callbacks.credentials(move |a, b, c| credentials()(a, b, c));
}
remote_callbacks
}
impl GitIndex {
    /// Creates a new git index. The `refresh` method must be called before using the index, preferably immediately after creating it.
    pub fn new<P: AsRef<Path>>(
        path: P,
        repo_url: &str,
        credentials: Option<CredentialsFn>,
    ) -> Self {
        Self {
            path: path.as_ref().to_path_buf(),
            repo_url: repo_url.to_string(),
            credentials_fn: credentials.map(Arc::new),
        }
    }
    /// Gets the path of the index
    pub fn path(&self) -> &Path {
        &self.path
    }
    /// Gets the URL of the index's repository
    pub fn repo_url(&self) -> &str {
        &self.repo_url
    }
    /// Refreshes the index
    ///
    /// Clones the repository on first use; otherwise fetches the upstream
    /// branch from `origin` and hard-resets the checkout to it (any local
    /// modifications are discarded).
    pub fn refresh(&self) -> Result<(), RefreshError> {
        // A directory that exists but cannot be opened as a repository falls
        // through to the clone path below.
        let repo = if self.path.exists() {
            Repository::open(&self.path).ok()
        } else {
            None
        };
        if let Some(repo) = repo {
            let mut remote = repo.find_remote("origin")?;
            let (refspec, upstream_branch) = get_refspec(&repo, &mut remote)?;
            remote.fetch(
                &[&refspec],
                Some(git2::FetchOptions::new().remote_callbacks(remote_callbacks(self))),
                None,
            )?;
            // Sync the working tree to the freshly-fetched upstream commit.
            let commit = repo.find_reference(&upstream_branch)?.peel_to_commit()?;
            debug!(
                "refreshing index, fetching {refspec}#{} from origin",
                commit.id().to_string()
            );
            repo.reset(&commit.into_object(), git2::ResetType::Hard, None)?;
            Ok(())
        } else {
            debug!(
                "refreshing index - first time, cloning {} into {}",
                self.repo_url,
                self.path.display()
            );
            create_dir_all(&self.path)?;
            let mut fetch_options = git2::FetchOptions::new();
            fetch_options.remote_callbacks(remote_callbacks(self));
            RepoBuilder::new()
                .fetch_options(fetch_options)
                .clone(&self.repo_url, &self.path)?;
            Ok(())
        }
    }
    /// Commits and pushes to the index
    ///
    /// Stages every change in the working tree, commits on top of HEAD with
    /// the given message and signature, then pushes using the refspec derived
    /// from the upstream branch.
    pub fn commit_and_push(
        &self,
        message: &str,
        signature: &Signature,
    ) -> Result<(), CommitAndPushError> {
        let repo = Repository::open(&self.path)?;
        let mut index = repo.index()?;
        // Stage everything — callers write files directly into the checkout.
        index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)?;
        index.write()?;
        let oid = index.write_tree()?;
        let tree = repo.find_tree(oid)?;
        let parent_commit = repo.head()?.peel_to_commit()?;
        repo.commit(
            Some("HEAD"),
            signature,
            signature,
            message,
            &tree,
            &[&parent_commit],
        )?;
        let mut remote = repo.find_remote("origin")?;
        let (refspec, _) = get_refspec(&repo, &mut remote)?;
        remote.push(
            &[&refspec],
            Some(git2::PushOptions::new().remote_callbacks(remote_callbacks(self))),
        )?;
        Ok(())
    }
}
impl Index for GitIndex {
    fn scope_owners(&self, scope: &str) -> Result<Option<ScopeOwners>, ScopeOwnersError> {
        // Owners live at `<index>/<scope>/owners.yaml`; a missing file means
        // the scope does not exist.
        let path = self.path.join(scope).join("owners.yaml");
        if !path.exists() {
            return Ok(None);
        }
        let contents = std::fs::read(&path)?;
        let owners: ScopeOwners =
            serde_yaml::from_slice(&contents).map_err(ScopeOwnersError::ScopeOwnersDeser)?;
        Ok(Some(owners))
    }
    fn create_scope_for(
        &mut self,
        scope: &str,
        owners: &ScopeOwners,
    ) -> Result<bool, ScopeOwnersError> {
        // Returns `false` without touching anything when the scope exists.
        let path = self.path.join(scope);
        if path.exists() {
            return Ok(false);
        }
        create_dir_all(&path)?;
        // NOTE(review): a *serialization* failure here is mapped to the
        // `ScopeOwnersDeser` ("deserializing") variant.
        serde_yaml::to_writer(std::fs::File::create(path.join("owners.yaml"))?, owners)
            .map_err(ScopeOwnersError::ScopeOwnersDeser)?;
        Ok(true)
    }
    fn package(&self, name: &PackageName) -> Result<Option<IndexFile>, IndexPackageError> {
        // Each package is a single YAML file at `<index>/<scope>/<name>`.
        let path = self.path.join(name.scope()).join(name.name());
        if !path.exists() {
            return Ok(None);
        }
        let contents = std::fs::read(&path)?;
        let file: IndexFile =
            serde_yaml::from_slice(&contents).map_err(IndexPackageError::FileDeser)?;
        Ok(Some(file))
    }
    fn create_package_version(
        &mut self,
        manifest: &Manifest,
        uploader: &u64,
    ) -> Result<bool, CreatePackageVersionError> {
        let scope = manifest.name.scope();
        // Existing scope: uploader must be an owner. New scope: the uploader
        // becomes its sole owner.
        if let Some(owners) = self.scope_owners(scope)? {
            if !owners.contains(uploader) {
                return Err(CreatePackageVersionError::MissingScopeOwnership);
            }
        } else if !self.create_scope_for(scope, &BTreeSet::from([*uploader]))? {
            return Err(CreatePackageVersionError::MissingScopeOwnership);
        }
        let path = self.path.join(scope);
        // Read-modify-write of the package's index file; `Ok(false)` when the
        // version is already published. NOTE(review): this is not atomic —
        // concurrent publishes could race; confirm external locking exists.
        let mut file = if let Some(file) = self.package(&manifest.name)? {
            if file.iter().any(|e| e.version == manifest.version) {
                return Ok(false);
            }
            file
        } else {
            vec![]
        };
        file.push(manifest.clone().into());
        serde_yaml::to_writer(
            std::fs::File::create(path.join(manifest.name.name()))?,
            &file,
        )
        .map_err(CreatePackageVersionError::FileSer)?;
        // Changes are only written to the working tree here; committing and
        // pushing is the caller's responsibility (see `commit_and_push`).
        Ok(true)
    }
    fn config(&self) -> Result<IndexConfig, ConfigError> {
        // The config lives at the root of the index checkout.
        let path = self.path.join("config.yaml");
        if !path.exists() {
            return Err(ConfigError::MissingConfig);
        }
        let contents = std::fs::read(&path)?;
        let config: IndexConfig =
            serde_yaml::from_slice(&contents).map_err(ConfigError::ConfigDeser)?;
        Ok(config)
    }
    fn credentials_fn(&self) -> Option<&Arc<CredentialsFn>> {
        self.credentials_fn.as_ref()
    }
}
/// The configuration of the index
/// (deserialized from `config.yaml` at the root of the index repository)
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct IndexConfig {
    /// The URL of the index's API
    pub api: String,
    /// The URL of the index's download API, defaults to `{API_URL}/v0/packages/{PACKAGE_AUTHOR}/{PACKAGE_NAME}/{PACKAGE_VERSION}`.
    /// Has the following variables:
    /// - `{API_URL}`: The URL of the index's API
    /// - `{PACKAGE_AUTHOR}`: The author of the package
    /// - `{PACKAGE_NAME}`: The name of the package
    /// - `{PACKAGE_VERSION}`: The version of the package
    /// Only `{API_URL}` is substituted by `IndexConfig::download`; the rest
    /// are left for the caller to fill in.
    pub download: Option<String>,
    /// Whether to allow git dependencies
    #[serde(default)]
    pub git_allowed: bool,
    /// Whether to allow custom registries
    #[serde(default)]
    pub custom_registry_allowed: bool,
    /// The OAuth client ID for GitHub OAuth
    pub github_oauth_client_id: String,
}
impl IndexConfig {
    /// Gets the URL of the index's API, with at most one trailing `/` removed
    pub fn api(&self) -> &str {
        self.api.strip_suffix('/').unwrap_or(&self.api)
    }

    /// Gets the URL of the index's download API with `{API_URL}` substituted.
    /// The `{PACKAGE_*}` placeholders are left for the caller to fill in.
    pub fn download(&self) -> String {
        // `as_deref` borrows the configured value as `&str`, avoiding the
        // previous pattern of allocating a default `String` only to borrow it.
        self.download
            .as_deref()
            .unwrap_or("{API_URL}/v0/packages/{PACKAGE_AUTHOR}/{PACKAGE_NAME}/{PACKAGE_VERSION}")
            .replace("{API_URL}", self.api())
    }
}
/// An entry in the index file
/// (one per published version of a package; built from a `Manifest` via `From`)
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct IndexFileEntry {
    /// The version of the package
    pub version: Version,
    /// The realm of the package
    // NOTE(review): unlike the optional fields below, `realm` has no
    // `skip_serializing_if`, so `realm: null` is written out explicitly.
    pub realm: Option<Realm>,
    /// A description of the package
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// The license of the package
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub license: Option<String>,
    /// The authors of the package
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub authors: Option<Vec<String>>,
    /// The dependencies of the package
    /// (normal and peer dependencies flattened, each tagged with its type)
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dependencies: Vec<(DependencySpecifier, DependencyType)>,
}
impl From<Manifest> for IndexFileEntry {
    /// Builds an index entry from a manifest, flattening normal and peer
    /// dependencies into a single tagged list.
    fn from(manifest: Manifest) -> IndexFileEntry {
        IndexFileEntry {
            // Evaluated first: `dependencies()` only borrows `manifest`,
            // after which the remaining fields are moved out of it.
            dependencies: manifest.dependencies(),
            version: manifest.version,
            realm: manifest.realm,
            description: manifest.description,
            license: manifest.license,
            authors: manifest.authors,
        }
    }
}
/// An index file
///
/// One per package: every published version's entry, appended in publish
/// order by `create_package_version`.
pub type IndexFile = Vec<IndexFileEntry>;

48
src/lib.rs Normal file
View file

@ -0,0 +1,48 @@
#![deny(missing_docs)]
//! pesde is a package manager for Roblox that is designed to be feature-rich and easy to use.
//! Currently, pesde is in a very early stage of development, but already supports the following features:
//! - Managing dependencies
//! - Re-exporting types
//! - `bin` exports (ran with Lune)
//! - Patching packages
/// Resolving, downloading and managing dependencies
pub mod dependencies;
/// Managing the pesde index
pub mod index;
/// Creating linking files ('re-export' modules)
pub mod linking_file;
/// Managing the pesde manifest
pub mod manifest;
/// Multi-threading utilities
pub mod multithread;
/// Creating, parsing, and validating package names
pub mod package_name;
/// Managing patches
pub mod patches;
/// Managing pesde projects
pub mod project;
/// The folder that contains shared packages
pub const PACKAGES_FOLDER: &str = "packages";
/// The folder that contains dev packages
pub const DEV_PACKAGES_FOLDER: &str = "dev_packages";
/// The folder that contains server packages
pub const SERVER_PACKAGES_FOLDER: &str = "server_packages";
/// The folder that contains the packages index (where every package is stored after being downloaded)
pub const INDEX_FOLDER: &str = "pesde_index";
/// The name of the manifest file
pub const MANIFEST_FILE_NAME: &str = "pesde.yaml";
/// The name of the lockfile
pub const LOCKFILE_FILE_NAME: &str = "pesde-lock.yaml";
/// The name of the patches folder
pub const PATCHES_FOLDER: &str = "patches";
/// Files to be ignored when publishing
/// (the generated package folders plus git metadata)
pub const IGNORED_FOLDERS: &[&str] = &[
    PACKAGES_FOLDER,
    DEV_PACKAGES_FOLDER,
    SERVER_PACKAGES_FOLDER,
    ".git",
];
/// HTTP User-Agent in `name/version` form, derived from Cargo metadata.
const USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"));

284
src/linking_file.rs Normal file
View file

@ -0,0 +1,284 @@
use std::{
fs::{read, write},
iter,
path::{Component, Path, PathBuf},
str::from_utf8,
};
use full_moon::{
ast::types::ExportedTypeDeclaration,
parse,
visitors::{Visit, Visitor},
};
use log::debug;
use semver::Version;
use thiserror::Error;
use crate::{
dependencies::resolution::{packages_folder, ResolvedPackage, ResolvedVersionsMap},
index::Index,
manifest::{Manifest, ManifestReadError, PathStyle, Realm},
package_name::PackageName,
project::Project,
};
/// AST visitor that collects `export type` declarations from a Luau source so
/// the linking file can re-export them.
struct TypeVisitor {
    // Rendered `export type Name<...> = module.Name<...>\n` lines, one per
    // exported type found.
    pub(crate) types: Vec<String>,
}
impl Visitor for TypeVisitor {
    // Called for every `export type ...` declaration in the parsed source.
    fn visit_exported_type_declaration(&mut self, node: &ExportedTypeDeclaration) {
        let name = node.type_declaration().type_name().to_string();
        // Build two generic lists: the declaration side keeps each generic
        // verbatim (presumably including its default — full_moon renders the
        // node as written; TODO confirm), while the use side must not repeat
        // defaults and so falls back to the bare parameter name.
        let (declaration_generics, generics) =
            if let Some(declaration) = node.type_declaration().generics() {
                let mut declaration_generics = vec![];
                let mut generics = vec![];
                for generic in declaration.generics().iter() {
                    declaration_generics.push(generic.to_string());
                    if generic.default_type().is_some() {
                        generics.push(generic.parameter().to_string())
                    } else {
                        generics.push(generic.to_string())
                    }
                }
                (
                    format!("<{}>", declaration_generics.join(", ")),
                    format!("<{}>", generics.join(", ")),
                )
            } else {
                ("".to_string(), "".to_string())
            };
        // Re-export from the required module under the same name.
        self.types.push(format!(
            "export type {name}{declaration_generics} = module.{name}{generics}\n"
        ));
    }
}
/// Generates the contents of a linking file, given the require path, and the contents of the target file
/// The contents will be scanned for type exports, and the linking file will be generated accordingly
pub fn linking_file(content: &str, path: &str) -> Result<String, full_moon::Error> {
let mut linker = format!("local module = require({path})\n");
let mut visitor = TypeVisitor { types: vec![] };
parse(content)?.nodes().visit(&mut visitor);
for ty in visitor.types {
linker.push_str(&ty);
}
linker.push_str("return module");
Ok(linker)
}
#[derive(Debug, Error)]
/// An error that occurred while linking dependencies
pub enum LinkingError {
    #[error("error interacting with the file system")]
    /// An error that occurred while interacting with the file system
    Io(#[from] std::io::Error),
    #[error("failed getting file name from {0}")]
    /// An error that occurred while getting a file name
    FileNameFail(PathBuf),
    #[error("failed converting file name to string")]
    /// An error that occurred while converting a file name to a string
    FileNameToStringFail,
    #[error("failed getting relative path from {0} to {1}")]
    /// An error that occurred while getting a relative path (fields: from, to)
    RelativePathFail(PathBuf, PathBuf),
    #[error("failed getting path parent of {0}")]
    /// An error that occurred while getting a path parent
    ParentFail(PathBuf),
    #[error("failed to convert path component to string")]
    /// An error that occurred while converting a path component to a string
    ComponentToStringFail,
    #[error("failed to get path string")]
    /// An error that occurred while getting a path string
    PathToStringFail,
    #[error("error encoding utf-8 string")]
    /// An error that occurred while converting a byte slice to a string
    Utf8(#[from] std::str::Utf8Error),
    #[error("error reading manifest")]
    /// An error that occurred while reading the manifest of a package
    ManifestRead(#[from] ManifestReadError),
    #[error("missing realm {0} in-game path")]
    /// An error that occurred while getting the in-game path for a realm
    /// (the manifest's `place` map has no entry for this realm)
    MissingRealmInGamePath(Realm),
    #[error("library source is not valid Luau")]
    /// An error that occurred because the library source is not valid Luau
    InvalidLuau(#[from] full_moon::Error),
}
/// Writes the linking file for `resolved_pkg` into `destination_dir` (or its
/// realm's packages folder when it is a root package). Packages without a
/// `lib` export are skipped silently.
pub(crate) fn link<P: AsRef<Path>, Q: AsRef<Path>, I: Index>(
    project: &Project<I>,
    resolved_pkg: &ResolvedPackage,
    destination_dir: P,
    parent_dependency_packages_dir: Q,
) -> Result<(), LinkingError> {
    let (_, source_dir) = resolved_pkg.directory(project.path());
    let file = Manifest::from_path(&source_dir)?;
    // Nothing to link if the package doesn't export a library.
    let Some(lib_export) = file.exports.lib else {
        return Ok(());
    };
    let lib_export = lib_export.to_path(&source_dir);
    let path_style = &project.manifest().path_style;
    // Roblox is currently the only path style (irrefutable pattern).
    let PathStyle::Roblox { place } = &path_style;
    debug!("linking {resolved_pkg} using `{}` path style", path_style);
    let name = resolved_pkg.pkg_ref.name().name();
    // Root packages are linked into their realm's packages folder; transitive
    // dependencies go into the caller-provided container directory.
    let destination_dir = if resolved_pkg.is_root {
        project.path().join(packages_folder(
            &resolved_pkg.specifier.realm().cloned().unwrap_or_default(),
        ))
    } else {
        destination_dir.as_ref().to_path_buf()
    };
    let destination_file = destination_dir.join(format!("{name}.lua"));
    let realm_folder = project.path().join(resolved_pkg.packages_folder());
    let in_different_folders = realm_folder != parent_dependency_packages_dir.as_ref();
    // Compute the filesystem-relative path that will be translated into a
    // Roblox instance path below.
    let mut path = if in_different_folders {
        pathdiff::diff_paths(&source_dir, &realm_folder)
            .ok_or_else(|| LinkingError::RelativePathFail(source_dir.clone(), realm_folder))?
    } else {
        pathdiff::diff_paths(&source_dir, &destination_dir).ok_or_else(|| {
            LinkingError::RelativePathFail(source_dir.clone(), destination_dir.to_path_buf())
        })?
    };
    path.set_extension("");
    // Cross-realm links are anchored at the realm's configured in-game
    // location; same-folder links are relative to the linking script itself.
    let beginning = if in_different_folders {
        place
            .get(&resolved_pkg.realm)
            .ok_or_else(|| LinkingError::MissingRealmInGamePath(resolved_pkg.realm))?
            .clone()
    } else if name == "init" {
        "script".to_string()
    } else {
        "script.Parent".to_string()
    };
    // Translate each path component: `..` -> `.Parent`, a normal component ->
    // an indexing expression (`{:?}` quotes/escapes the name).
    let path = iter::once(Ok(beginning))
        .chain(path.components().map(|component| {
            Ok(match component {
                Component::ParentDir => ".Parent".to_string(),
                Component::Normal(part) => format!(
                    "[{:?}]",
                    part.to_str().ok_or(LinkingError::ComponentToStringFail)?
                ),
                _ => unreachable!("invalid path component"),
            })
        }))
        .collect::<Result<String, LinkingError>>()?;
    debug!(
        "writing linking file for {} with import `{path}` to {}",
        source_dir.display(),
        destination_file.display()
    );
    let raw_file_contents = read(lib_export)?;
    let file_contents = from_utf8(&raw_file_contents)?;
    let linking_file_contents = linking_file(file_contents, &path)?;
    write(&destination_file, linking_file_contents)?;
    Ok(())
}
#[derive(Debug, Error)]
#[error("error linking {1}@{2} to {3}@{4}")]
/// An error that occurred while linking the dependencies
///
/// Tuple fields: the underlying error, the dependency's name and version,
/// then the dependent package's name and version (per the `#[error]` format).
pub struct LinkingDependenciesError(
    #[source] LinkingError,
    PackageName,
    Version,
    PackageName,
    Version,
);
impl<I: Index> Project<I> {
    /// Links the dependencies of the project
    ///
    /// For every resolved package, writes the linking files of its
    /// dependencies into its container directory; root packages are
    /// additionally linked into their realm's packages folder.
    pub fn link_dependencies(
        &self,
        map: &ResolvedVersionsMap,
    ) -> Result<(), LinkingDependenciesError> {
        for (name, versions) in map {
            for (version, resolved_pkg) in versions {
                let (container_dir, _) = resolved_pkg.directory(self.path());
                debug!(
                    "linking package {name}@{version}'s dependencies to directory {}",
                    container_dir.display()
                );
                for (dep_name, dep_version) in &resolved_pkg.dependencies {
                    // NOTE(review): relies on the invariant that every listed
                    // dependency is present in `map`; a missing entry panics.
                    let dep = map
                        .get(dep_name)
                        .and_then(|versions| versions.get(dep_version))
                        .unwrap();
                    link(
                        self,
                        dep,
                        &container_dir,
                        &self.path().join(resolved_pkg.packages_folder()),
                    )
                    .map_err(|e| {
                        LinkingDependenciesError(
                            e,
                            dep_name.clone(),
                            dep_version.clone(),
                            name.clone(),
                            version.clone(),
                        )
                    })?;
                }
                if resolved_pkg.is_root {
                    let linking_dir = &self.path().join(resolved_pkg.packages_folder());
                    debug!(
                        "linking root package {name}@{version} to directory {}",
                        linking_dir.display()
                    );
                    // A root package is its own "dependent" in the error.
                    link(self, resolved_pkg, linking_dir, linking_dir).map_err(|e| {
                        LinkingDependenciesError(
                            e,
                            name.clone(),
                            version.clone(),
                            name.clone(),
                            version.clone(),
                        )
                    })?;
                }
            }
        }
        Ok(())
    }
}

217
src/main.rs Normal file
View file

@ -0,0 +1,217 @@
use std::{
fs::{create_dir_all, read},
hash::{DefaultHasher, Hash, Hasher},
path::PathBuf,
};
use auth_git2::GitAuthenticator;
use clap::{Parser, Subcommand};
use directories::ProjectDirs;
use indicatif::MultiProgress;
use indicatif_log_bridge::LogWrapper;
use keyring::Entry;
use pretty_env_logger::env_logger::Env;
use reqwest::header::{ACCEPT, AUTHORIZATION};
use serde::{Deserialize, Serialize};
use cli::{
auth::{auth_command, AuthCommand},
config::{config_command, ConfigCommand},
root::root_command,
};
use pesde::index::GitIndex;
mod cli;
// NOTE: variant/field doc comments become clap help text, so review notes
// below use `//` to avoid changing the CLI's output.
#[derive(Subcommand)]
pub enum Command {
    /// Installs the dependencies of the project
    Install {
        // presumably enforces installing exactly what the lockfile records —
        // TODO confirm in `root_command`
        #[clap(long, short)]
        locked: bool,
    },
    /// Runs the `bin` export of the specified package
    Run {
        #[clap(value_name = "PACKAGE")]
        package: String,
        // arguments after `--` are forwarded to the executed bin
        #[clap(last = true)]
        args: Vec<String>,
    },
    /// Searches for a package on the registry
    Search {
        #[clap(value_name = "QUERY")]
        query: Option<String>,
    },
    /// Publishes the project to the registry
    Publish,
    /// Begins a new patch
    Patch { package: String },
    /// Commits (finished) the patch
    PatchCommit {
        #[clap(value_name = "DIRECTORY")]
        dir: PathBuf,
    },
    /// Auth-related commands
    Auth {
        #[clap(subcommand)]
        command: AuthCommand,
    },
    /// Config-related commands
    Config {
        #[clap(subcommand)]
        command: ConfigCommand,
    },
}
// Top-level CLI arguments. `//` comments only: doc comments would alter
// clap's generated help output.
#[derive(Parser)]
struct Cli {
    #[clap(subcommand)]
    command: Command,
    // directory to operate in; `main` falls back to the current working dir
    #[arg(short, long, value_name = "DIRECTORY")]
    directory: Option<PathBuf>,
}
/// Persisted CLI configuration, stored at `<config_dir>/config.yaml`
/// (see `CliConfig::write` and the load logic in `main`).
#[derive(Serialize, Deserialize, Clone)]
struct CliConfig {
    /// URL of the git repository backing the package index
    index_repo_url: String,
    /// Optional override for the cache directory; `None` uses the platform default
    cache_dir: Option<PathBuf>,
}
impl CliConfig {
    /// Resolves the cache directory: the configured override when set,
    /// otherwise the platform default supplied by `directories`.
    fn cache_dir(&self, directories: &ProjectDirs) -> PathBuf {
        match &self.cache_dir {
            Some(dir) => dir.clone(),
            None => directories.cache_dir().to_path_buf(),
        }
    }
}
/// State shared by every CLI command handler.
struct CliParams {
    /// The package index (already refreshed by `main`)
    index: GitIndex,
    /// Keyring entry that stores the registry API token
    api_token_entry: Entry,
    /// Blocking HTTP client pre-configured with default headers in `main`
    reqwest_client: reqwest::blocking::Client,
    /// The loaded CLI configuration
    cli_config: CliConfig,
    /// Directory the CLI operates in (`--directory` or the current dir)
    cwd: PathBuf,
    /// Progress-bar host that log output is routed through
    multi: MultiProgress,
    /// Platform project directories (config/cache locations)
    directories: ProjectDirs,
}
impl CliConfig {
    /// Persists this configuration to `<config_dir>/config.yaml`.
    // NOTE(review): this is a second `impl CliConfig` block; consider merging
    // it with the `cache_dir` impl above.
    fn write(&self, directories: &ProjectDirs) -> anyhow::Result<()> {
        let cli_config_path = directories.config_dir().join("config.yaml");
        serde_yaml::to_writer(
            &mut std::fs::File::create(cli_config_path.as_path())?,
            &self,
        )?;
        Ok(())
    }
}
/// Sends a prepared request, returning the `Response` on success and
/// panicking (with status error and response body) on a non-success status.
/// Must be used inside a function returning a compatible `Result` (uses `?`).
#[macro_export]
macro_rules! send_request {
    ($req:expr) => {{
        let res = $req.send()?;
        match res.error_for_status_ref() {
            Ok(_) => res,
            Err(e) => {
                panic!("request failed: {e}\nbody: {}", res.text()?);
            }
        }
    }};
}
/// CLI entry point: sets up logging, loads (or initializes) the CLI config,
/// prepares the package index and HTTP client, then dispatches to the
/// requested subcommand.
fn main() -> anyhow::Result<()> {
    // Route log lines through the progress bars so output doesn't interleave.
    let logger = pretty_env_logger::formatted_builder()
        .parse_env(Env::default().default_filter_or("info"))
        .build();
    let multi = MultiProgress::new();
    LogWrapper::new(multi.clone(), logger).try_init().unwrap();
    let cli = Cli::parse();
    let directories = ProjectDirs::from("com", env!("CARGO_BIN_NAME"), env!("CARGO_BIN_NAME"))
        .expect("couldn't get home directory");
    // Load the persisted config, or create and persist a default on first run.
    let cli_config_path = directories.config_dir().join("config.yaml");
    let cli_config = if cli_config_path.exists() {
        serde_yaml::from_slice(&read(cli_config_path.as_path())?)?
    } else {
        let config = CliConfig {
            index_repo_url: "https://github.com/daimond113/pesde-index".to_string(),
            cache_dir: None,
        };
        create_dir_all(directories.config_dir())?;
        config.write(&directories)?;
        config
    };
    let cwd_buf = cli
        .directory
        .or(std::env::current_dir().ok())
        .ok_or(anyhow::anyhow!("couldn't get current directory"))?;
    let api_token_entry = Entry::new(env!("CARGO_BIN_NAME"), "api_token")?;
    // The index checkout is cached per repo URL; hash the URL for a stable
    // folder name.
    let mut hasher = DefaultHasher::new();
    cli_config.index_repo_url.hash(&mut hasher);
    let hash = hasher.finish().to_string();
    let index = GitIndex::new(
        cli_config.cache_dir(&directories).join("index").join(hash),
        &cli_config.index_repo_url,
        Some(Box::new(|| {
            Box::new(|a, b, c| {
                let git_authenticator = GitAuthenticator::new();
                let config = git2::Config::open_default().unwrap();
                let mut cred = git_authenticator.credentials(&config);
                cred(a, b, c)
            })
        })),
    );
    index.refresh()?;
    let mut header_map = reqwest::header::HeaderMap::new();
    header_map.insert(ACCEPT, "application/json".parse()?);
    // Fix: only attach the token when one is actually stored. Previously
    // `get_password()?` failed on a fresh install (no keyring entry), which
    // aborted startup and made even `pesde auth login` impossible to run.
    if let Ok(api_token) = api_token_entry.get_password() {
        header_map.insert(AUTHORIZATION, format!("Bearer {api_token}").parse()?);
    }
    header_map.insert("X-GitHub-Api-Version", "2022-11-28".parse()?);
    let reqwest_client = reqwest::blocking::Client::builder()
        .user_agent(concat!(
            env!("CARGO_PKG_NAME"),
            "/",
            env!("CARGO_PKG_VERSION")
        ))
        .default_headers(header_map)
        .build()?;
    let params = CliParams {
        index,
        api_token_entry,
        reqwest_client,
        cli_config,
        cwd: cwd_buf,
        multi,
        directories,
    };
    match cli.command {
        Command::Auth { command } => auth_command(command, params),
        Command::Config { command } => config_command(command, params),
        cmd => root_command(cmd, params),
    }
}

180
src/manifest.rs Normal file
View file

@ -0,0 +1,180 @@
use std::{collections::BTreeMap, fmt::Display, fs::read};
use relative_path::RelativePathBuf;
use semver::Version;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{dependencies::DependencySpecifier, package_name::PackageName, MANIFEST_FILE_NAME};
/// The files exported by the package
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[serde(deny_unknown_fields)]
pub struct Exports {
    /// Points to the file which exports the package. Currently this is only used for re-exporting types.
    /// Libraries must have a structure in Roblox where the main file becomes the folder, for example:
    /// a package called pesde/lib has a file called src/main.lua;
    /// pesde puts this package in a folder called pesde_lib,
    /// and the package must have configuration set up for file-syncing tools such as Rojo so that src/main.lua becomes the pesde_lib ModuleScript
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub lib: Option<RelativePathBuf>,
    /// Points to the file that will be executed with Lune
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub bin: Option<RelativePathBuf>,
}
/// The path style used by the package
/// (currently Roblox-style is the only variant)
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub enum PathStyle {
    /// The path style used by Roblox (e.g. `script.Parent` or `script.Parent.Parent`)
    Roblox {
        /// A map of realm to in-game package folder location (used for linking between packages in different realms)
        #[serde(default)]
        place: BTreeMap<Realm, String>,
    },
}
impl Default for PathStyle {
    /// Defaults to the Roblox path style with no realm-to-location mappings.
    fn default() -> Self {
        let place = BTreeMap::new();
        PathStyle::Roblox { place }
    }
}
impl Display for PathStyle {
    /// Renders the style's snake_case name, matching its serde form.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            PathStyle::Roblox { .. } => "roblox",
        };
        f.write_str(name)
    }
}
/// The realm of the package
/// (determines which packages folder a package is installed into)
#[derive(
    Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Copy, Default,
)]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub enum Realm {
    /// The package is shared (usually ReplicatedStorage)
    #[default]
    Shared,
    /// The package is server only (usually ServerScriptService/ServerStorage)
    Server,
    /// The package is development only
    Development,
}
impl Realm {
    /// Returns the most restrictive realm: `Shared` yields to `other`, while
    /// `Server` and `Development` take precedence over it.
    pub fn or<'a>(&'a self, other: &'a Self) -> &'a Self {
        if matches!(self, Realm::Shared) {
            other
        } else {
            self
        }
    }
}
impl Display for Realm {
    /// Renders the realm's snake_case name, matching its serde form.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Realm::Shared => "shared",
            Realm::Server => "server",
            Realm::Development => "development",
        })
    }
}
/// The manifest of a package
/// (the contents of `pesde.yaml`; see `Manifest::from_path`)
#[derive(Serialize, Deserialize, Debug, Clone)]
// #[serde(deny_unknown_fields)]
// NOTE(review): deny_unknown_fields is disabled, so unknown manifest keys are
// silently ignored — confirm this is intentional.
pub struct Manifest {
    /// The name of the package
    pub name: PackageName,
    /// The version of the package. Must be [semver](https://semver.org) compatible. The registry will not accept non-semver versions and the CLI will not handle such packages
    pub version: Version,
    /// The files exported by the package
    #[serde(default)]
    pub exports: Exports,
    /// The path style to use for linking modules
    #[serde(default)]
    pub path_style: PathStyle,
    /// Whether the package is private (it should not be published)
    #[serde(default)]
    pub private: bool,
    /// The realm of the package
    pub realm: Option<Realm>,
    /// The dependencies of the package
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dependencies: Vec<DependencySpecifier>,
    /// The peer dependencies of the package
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub peer_dependencies: Vec<DependencySpecifier>,
    /// A short description of the package
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// The license of the package
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub license: Option<String>,
    /// The authors of the package
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub authors: Option<Vec<String>>,
}
/// An error that occurred while reading the manifest
#[derive(Debug, Error)]
pub enum ManifestReadError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while deserializing the manifest
    /// (the file exists but is not valid manifest YAML)
    #[error("error deserializing manifest")]
    ManifestDeser(#[source] serde_yaml::Error),
}
/// The type of dependency
/// (tags each entry produced by `Manifest::dependencies`)
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[serde(rename_all = "snake_case")]
pub enum DependencyType {
    /// A normal dependency
    #[default]
    Normal,
    /// A peer dependency
    Peer,
}
impl Manifest {
    /// Reads a manifest from a path (if the path is a directory, it will look for the manifest file inside it, otherwise it will read the file directly)
    pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Result<Self, ManifestReadError> {
        let path = path.as_ref();
        // Accept either the manifest file itself or its containing directory.
        let path = if path.file_name() == Some(MANIFEST_FILE_NAME.as_ref()) {
            path.to_path_buf()
        } else {
            path.join(MANIFEST_FILE_NAME)
        };
        let raw_contents = read(path)?;
        let manifest =
            serde_yaml::from_slice(&raw_contents).map_err(ManifestReadError::ManifestDeser)?;
        Ok(manifest)
    }
    /// Returns all dependencies
    ///
    /// Normal dependencies come first, then peer dependencies, each tagged
    /// with its `DependencyType`.
    pub fn dependencies(&self) -> Vec<(DependencySpecifier, DependencyType)> {
        self.dependencies
            .iter()
            .map(|dep| (dep.clone(), DependencyType::Normal))
            .chain(
                self.peer_dependencies
                    .iter()
                    .map(|dep| (dep.clone(), DependencyType::Peer)),
            )
            .collect()
    }
}

33
src/multithread.rs Normal file
View file

@ -0,0 +1,33 @@
use std::sync::mpsc::Receiver;
use threadpool::ThreadPool;
/// A multithreaded job
///
/// Pairs a worker pool with the receiving end of the channel workers use to
/// report one `Result` per completed task (senders are handed out by `new`).
pub struct MultithreadedJob<E> {
    // receives task results; drained by `wait`
    pub(crate) progress: Receiver<Result<(), E>>,
    // fixed-size worker pool tasks are queued on
    pub(crate) pool: ThreadPool,
}
impl<E> MultithreadedJob<E> {
    /// Creates a job backed by a 6-worker pool, returning it together with
    /// the sender half workers should report their results through.
    pub(crate) fn new() -> (Self, std::sync::mpsc::Sender<Result<(), E>>) {
        let (sender, receiver) = std::sync::mpsc::channel();
        let job = Self {
            progress: receiver,
            pool: ThreadPool::new(6),
        };
        (job, sender)
    }
    /// Returns the progress of the job
    pub fn progress(&self) -> &Receiver<Result<(), E>> {
        &self.progress
    }
    /// Waits for all queued tasks to finish, then returns the first reported
    /// error, if any. Draining the receiver only terminates once every sender
    /// has been dropped.
    pub fn wait(self) -> Result<(), E> {
        self.pool.join();
        // `collect` into `Result` short-circuits on the first `Err`, exactly
        // like the original explicit loop.
        self.progress.into_iter().collect()
    }
}

151
src/package_name.rs Normal file
View file

@ -0,0 +1,151 @@
use std::{fmt::Display, str::FromStr};
use serde::{de::Visitor, Deserialize, Serialize};
use thiserror::Error;
/// A package name
// Tuple fields: 0 = scope, 1 = name; both are validated by `validate_part`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct PackageName(String, String);
/// An error that occurred while validating a package name part (scope or name)
#[derive(Debug, Error)]
pub enum PackageNameValidationError {
    /// The package name part is empty
    #[error("package name part cannot be empty")]
    EmptyPart,
    /// The package name part contains invalid characters (only lowercase ASCII characters, numbers, and underscores are allowed)
    // carries the offending part for the error message
    #[error("package name {0} part can only contain lowercase ASCII characters, numbers, and underscores")]
    InvalidPart(String),
    /// The package name part is too long (it cannot be longer than 24 characters)
    #[error("package name {0} part cannot be longer than 24 characters")]
    PartTooLong(String),
}
/// Validates a package name part (scope or name): non-empty, at most 24
/// characters, consisting only of lowercase ASCII letters, digits and `_`.
pub fn validate_part(part: &str) -> Result<(), PackageNameValidationError> {
    if part.is_empty() {
        return Err(PackageNameValidationError::EmptyPart);
    }

    let allowed = |c: char| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '_';
    if part.chars().any(|c| !allowed(c)) {
        return Err(PackageNameValidationError::InvalidPart(part.to_string()));
    }

    // byte length equals character count here, since only ASCII passed the check above
    if part.len() > 24 {
        return Err(PackageNameValidationError::PartTooLong(part.to_string()));
    }

    Ok(())
}
/// Separator between scope and name in the canonical (display) form
const SEPARATOR: char = '/';
/// Separator used in the escaped form (safe for file names)
const ESCAPED_SEPARATOR: char = '-';
/// An error that occurred while parsing an escaped package name
#[derive(Debug, Error)]
pub enum EscapedPackageNameError {
    /// This is not a valid escaped package name
    #[error("package name is not in the format `scope{ESCAPED_SEPARATOR}name`")]
    Invalid,
    /// The package name is invalid
    // wraps validation failures of either part after splitting
    #[error("invalid package name")]
    InvalidName(#[from] PackageNameValidationError),
}
impl PackageName {
    /// Creates a new package name, validating both the scope and the name
    pub fn new(scope: &str, name: &str) -> Result<Self, PackageNameValidationError> {
        validate_part(scope)?;
        validate_part(name)?;

        Ok(Self(scope.to_string(), name.to_string()))
    }

    /// Parses an escaped package name (`scope-name`)
    pub fn from_escaped(s: &str) -> Result<Self, EscapedPackageNameError> {
        match s.split_once(ESCAPED_SEPARATOR) {
            Some((scope, name)) => Ok(Self::new(scope, name)?),
            None => Err(EscapedPackageNameError::Invalid),
        }
    }

    /// Gets the scope of the package name
    pub fn scope(&self) -> &str {
        self.0.as_str()
    }

    /// Gets the name of the package name
    pub fn name(&self) -> &str {
        self.1.as_str()
    }

    /// Gets the escaped form (for use in file names, etc.) of the package name
    pub fn escaped(&self) -> String {
        let mut out = String::with_capacity(self.0.len() + 1 + self.1.len());
        out.push_str(&self.0);
        out.push(ESCAPED_SEPARATOR);
        out.push_str(&self.1);
        out
    }

    /// Gets the parts of the package name as `(scope, name)`
    pub fn parts(&self) -> (&str, &str) {
        (self.0.as_str(), self.1.as_str())
    }
}
impl Display for PackageName {
    /// Formats the package name in its canonical `scope/name` form.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}{SEPARATOR}{}", self.0, self.1)
    }
}
/// An error that occurred while parsing a package name
#[derive(Debug, Error)]
pub enum FromStrPackageNameParseError {
    /// This is not a valid package name
    #[error("package name is not in the format `scope{SEPARATOR}name`")]
    Invalid,
    /// The package name is invalid
    // wraps validation failures of either part after splitting
    #[error("invalid name part")]
    InvalidPart(#[from] PackageNameValidationError),
}
impl FromStr for PackageName {
    type Err = FromStrPackageNameParseError;

    /// Parses `scope/name`; any other number of `/`-separated pieces is invalid.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut pieces = s.split(SEPARATOR);

        // exactly two pieces are required — a third `next()` must yield None
        match (pieces.next(), pieces.next(), pieces.next()) {
            (Some(scope), Some(name), None) => Ok(PackageName::new(scope, name)?),
            _ => Err(FromStrPackageNameParseError::Invalid),
        }
    }
}
impl Serialize for PackageName {
    /// Serializes the package name as its `scope/name` string form.
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        serializer.serialize_str(&self.to_string())
    }
}
/// Serde visitor that parses a package name from its `scope/name` string form.
struct PackageNameVisitor;

impl<'de> Visitor<'de> for PackageNameVisitor {
    type Value = PackageName;

    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(formatter, "a string in the format `scope{SEPARATOR}name`")
    }

    /// Delegates to the `FromStr` implementation, surfacing its error as a
    /// custom deserialization error.
    fn visit_str<E: serde::de::Error>(self, v: &str) -> Result<Self::Value, E> {
        v.parse().map_err(E::custom)
    }
}
impl<'de> Deserialize<'de> for PackageName {
    /// Deserializes a package name from its `scope/name` string form.
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<PackageName, D::Error> {
        deserializer.deserialize_str(PackageNameVisitor)
    }
}

210
src/patches.rs Normal file
View file

@ -0,0 +1,210 @@
use std::{
fs::{read, read_dir},
path::{Path, PathBuf},
};
use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature};
use log::debug;
use semver::Version;
use thiserror::Error;
use crate::{
dependencies::resolution::ResolvedVersionsMap, index::Index, package_name::PackageName,
project::Project, PATCHES_FOLDER,
};
/// Builds the git signature (current time, crate name as author, synthetic
/// `<crate>@localhost` email) used for commits created by this tool.
fn make_signature<'a>() -> Result<Signature<'a>, git2::Error> {
    let author = env!("CARGO_PKG_NAME");
    let email = concat!(env!("CARGO_PKG_NAME"), "@localhost");
    Signature::now(author, email)
}
/// Sets up a patches repository in the specified directory: initializes a git
/// repo, stages everything, and records an "original" baseline commit.
pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> {
    let repo = Repository::init(&dir)?;

    // Inner scope: `Tree` borrows `repo`, so it must be dropped before the
    // repository can be moved out of the function.
    {
        let sig = make_signature()?;

        let mut repo_index = repo.index()?;
        repo_index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)?;
        repo_index.write()?;

        let tree = repo.find_tree(repo_index.write_tree()?)?;
        repo.commit(Some("HEAD"), &sig, &sig, "original", &tree, &[])?;
    }

    Ok(repo)
}
/// An error that occurred while creating patches
#[derive(Debug, Error)]
pub enum CreatePatchError {
/// An error that occurred while interacting with the file system
#[error("error interacting with the file system")]
Io(#[from] std::io::Error),
/// An error that occurred while interacting with git
#[error("error interacting with git")]
Git(#[from] git2::Error),
/// An error that occurred while getting a file name
#[error("failed to get file name from {0}")]
FileNameFail(PathBuf),
/// An error that occurred while stripping a prefix
#[error("error stripping prefix {1} from path {2}")]
StripPrefixFail(#[source] std::path::StripPrefixError, PathBuf, PathBuf),
}
/// Creates a patch for the package in the specified directory by diffing the
/// repo's committed baseline against the current working tree, returning the
/// patch as raw bytes.
pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, CreatePatchError> {
    let mut patch_bytes = Vec::new();
    let repo = Repository::open(dir.as_ref())?;

    let baseline = repo.head()?.peel_to_tree()?;
    let diff = repo.diff_tree_to_workdir(Some(&baseline), None)?;

    diff.print(DiffFormat::Patch, |_delta, _hunk, line| {
        // content lines carry an origin marker ('+', '-' or ' ') that must be
        // re-emitted in front of the line; headers and hunk markers do not
        if matches!(
            line.origin_value(),
            DiffLineType::Context | DiffLineType::Addition | DiffLineType::Deletion
        ) {
            let mut buf = [0u8; 4];
            patch_bytes.extend(line.origin().encode_utf8(&mut buf).as_bytes());
        }

        patch_bytes.extend(line.content());
        true
    })?;

    Ok(patch_bytes)
}
/// An error that occurred while applying patches
#[derive(Debug, Error)]
pub enum ApplyPatchesError {
/// An error that occurred while interacting with the file system
#[error("error interacting with the file system")]
Io(#[from] std::io::Error),
/// An error that occurred while interacting with git
#[error("error interacting with git")]
Git(#[from] git2::Error),
/// An error that occurred while getting a file name
#[error("failed to get file name from {0}")]
FileNameFail(PathBuf),
/// An error that occurred while converting a path to a string
#[error("failed to convert path to string")]
ToStringFail,
/// An error that occurred because a patch name was malformed
#[error("malformed patch name {0}")]
MalformedPatch(String),
/// An error that occurred while parsing a package name
#[error("failed to parse package name {0}")]
PackageNameParse(#[from] crate::package_name::EscapedPackageNameError),
/// An error that occurred while getting a file stem
#[error("failed to get file stem")]
FileStemFail,
/// An error that occurred while reading a file
#[error("failed to read file")]
ReadFail,
/// An error that occurred because a package was not found in the dependencies
#[error("package {0} not found in the lockfile")]
PackageNotFound(PackageName),
/// An error that occurred because a version was not found for a package
#[error("version {0} not found for package {1}")]
VersionNotFound(Version, PackageName),
/// An error that occurred while parsing a version
#[error("failed to parse version")]
VersionParse(#[from] semver::Error),
/// An error that occurred while stripping a prefix
#[error("strip prefix error")]
StripPrefixFail(#[from] std::path::StripPrefixError),
}
impl<I: Index> Project<I> {
    /// Applies patches for the project
    ///
    /// Scans the project's patches folder for files named
    /// `<escaped package name>@<version>[.patch]`, looks each package up in the
    /// resolved versions map, applies the patch to the package's source
    /// directory, and commits the patched state so it becomes the new baseline.
    pub fn apply_patches(&self, map: &ResolvedVersionsMap) -> Result<(), ApplyPatchesError> {
        let patches_dir = self.path().join(PATCHES_FOLDER);
        // nothing to do if the project has no patches folder
        if !patches_dir.exists() {
            return Ok(());
        }

        for file in read_dir(&patches_dir)? {
            let file = file?;
            // skip subdirectories and other non-file entries
            if !file.file_type()?.is_file() {
                continue;
            }

            let path = file.path();

            let dir_name = path
                .file_name()
                .ok_or_else(|| ApplyPatchesError::FileNameFail(path.clone()))?;
            let dir_name = dir_name.to_str().ok_or(ApplyPatchesError::ToStringFail)?;

            // `.patch` suffix is optional; the `@` separates name from version
            let (package_name, version) = dir_name
                .strip_suffix(".patch")
                .unwrap_or(dir_name)
                .split_once('@')
                .ok_or_else(|| ApplyPatchesError::MalformedPatch(dir_name.to_string()))?;

            let package_name = PackageName::from_escaped(package_name)?;
            let version = Version::parse(version)?;

            // the patch must refer to a package present in the resolved map
            let versions = map
                .get(&package_name)
                .ok_or_else(|| ApplyPatchesError::PackageNotFound(package_name.clone()))?;

            let resolved_pkg = versions.get(&version).ok_or_else(|| {
                ApplyPatchesError::VersionNotFound(version.clone(), package_name.clone())
            })?;

            debug!("resolved package {package_name}@{version} to {resolved_pkg}");

            let (_, source_path) = resolved_pkg.directory(self.path());
            let diff = Diff::from_buffer(&read(&path)?)?;

            // reuse an existing repo in the package dir, or initialize one
            // with an "original" baseline commit to apply against
            let repo = match Repository::open(&source_path) {
                Ok(repo) => repo,
                Err(_) => setup_patches_repo(&source_path)?,
            };

            repo.apply(&diff, ApplyLocation::Both, None)?;

            // stage everything and commit the patched state on top of HEAD
            let mut index = repo.index()?;
            index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)?;
            index.write()?;

            let signature = make_signature()?;
            let tree_id = index.write_tree()?;
            let tree = repo.find_tree(tree_id)?;
            let parent = repo.head()?.peel_to_commit()?;
            repo.commit(
                Some("HEAD"),
                &signature,
                &signature,
                "applied patches",
                &tree,
                &[&parent],
            )?;
        }

        Ok(())
    }
}

219
src/project.rs Normal file
View file

@ -0,0 +1,219 @@
use std::{
fmt::Debug,
fs::{read, File},
path::{Path, PathBuf},
};
use thiserror::Error;
use crate::dependencies::DownloadError;
use crate::index::Index;
use crate::linking_file::LinkingDependenciesError;
use crate::{
dependencies::resolution::ResolvedVersionsMap,
manifest::{Manifest, ManifestReadError},
LOCKFILE_FILE_NAME,
};
/// A pesde project
#[derive(Clone, Debug)]
pub struct Project<I: Index> {
    // root directory of the project (where the manifest lives)
    path: PathBuf,
    // directory used to cache downloaded packages
    cache_path: PathBuf,
    // the package index implementation used for resolution
    index: I,
    // the project's parsed manifest
    manifest: Manifest,
    // optional token sent when talking to the registry
    registry_auth_token: Option<String>,
}
/// Options for installing a project
pub struct InstallOptions {
    // whether to use the lockfile and require it to be up-to-date
    locked: bool,
    // whether to download dependencies automatically during install
    auto_download: bool,
    // a pre-resolved dependency map to use instead of resolving again
    resolved_versions_map: Option<ResolvedVersionsMap>,
}
impl Default for InstallOptions {
    /// Defaults: not locked, auto-download enabled, no pre-resolved map.
    fn default() -> Self {
        Self {
            locked: false,
            auto_download: true,
            resolved_versions_map: None,
        }
    }
}
impl InstallOptions {
    /// Creates a new set of install options (uses the Default implementation)
    pub fn new() -> Self {
        Self::default()
    }

    /// Makes the installation to use the lockfile, and ensure that the lockfile is up-to-date
    pub fn locked(&self, locked: bool) -> Self {
        Self {
            locked,
            auto_download: self.auto_download,
            resolved_versions_map: self.resolved_versions_map.clone(),
        }
    }

    /// Makes the installation to automatically download the dependencies
    /// Having this set to false is only useful if you want to download the dependencies yourself. An example of this is the CLI's progress bar
    pub fn auto_download(&self, auto_download: bool) -> Self {
        Self {
            locked: self.locked,
            auto_download,
            resolved_versions_map: self.resolved_versions_map.clone(),
        }
    }

    /// Makes the installation to use the given resolved versions map
    /// Having this set to Some is only useful if you're using auto_download = false
    pub fn resolved_versions_map(&self, resolved_versions_map: ResolvedVersionsMap) -> Self {
        Self {
            locked: self.locked,
            auto_download: self.auto_download,
            resolved_versions_map: Some(resolved_versions_map),
        }
    }
}
/// An error that occurred while reading the lockfile
#[derive(Debug, Error)]
pub enum ReadLockfileError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while deserializing the lockfile
    /// from its YAML representation
    #[error("error deserializing lockfile")]
    LockfileDeser(#[source] serde_yaml::Error),
}
/// An error that occurred while downloading a project
// One variant per phase of `Project::install`: resolve, download, patch, link, lockfile write.
#[derive(Debug, Error)]
pub enum InstallProjectError {
    /// An error that occurred while resolving the dependency tree
    #[error("failed to resolve dependency tree")]
    ResolveTree(#[from] crate::dependencies::resolution::ResolveError),
    /// An error that occurred while downloading a package
    #[error("failed to download package")]
    DownloadPackage(#[from] DownloadError),
    /// An error that occurred while applying patches
    #[error("error applying patches")]
    ApplyPatches(#[from] crate::patches::ApplyPatchesError),
    /// An error that occurred while linking dependencies
    #[error("failed to link dependencies")]
    Linking(#[from] LinkingDependenciesError),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while writing the lockfile
    #[error("failed to write lockfile")]
    LockfileSer(#[source] serde_yaml::Error),
}
impl<I: Index> Project<I> {
    /// Creates a new project
    ///
    /// * `path` - root directory of the project
    /// * `cache_path` - directory used to cache downloaded packages
    /// * `index` - the package index implementation to resolve against
    /// * `manifest` - the project's already-parsed manifest
    /// * `registry_auth_token` - optional token for registry requests
    pub fn new<P: AsRef<Path>, Q: AsRef<Path>>(
        path: P,
        cache_path: Q,
        index: I,
        manifest: Manifest,
        registry_auth_token: Option<String>,
    ) -> Self {
        Self {
            path: path.as_ref().to_path_buf(),
            cache_path: cache_path.as_ref().to_path_buf(),
            index,
            manifest,
            registry_auth_token,
        }
    }

    /// Creates a new project from a path (manifest will be read from the path)
    pub fn from_path<P: AsRef<Path>, Q: AsRef<Path>>(
        path: P,
        cache_path: Q,
        index: I,
        registry_auth_token: Option<String>,
    ) -> Result<Self, ManifestReadError> {
        // Manifest::from_path resolves the manifest file inside `path` itself
        let manifest = Manifest::from_path(path.as_ref())?;

        Ok(Self::new(
            path,
            cache_path,
            index,
            manifest,
            registry_auth_token,
        ))
    }

    /// Returns the index of the project
    pub fn index(&self) -> &I {
        &self.index
    }

    /// Returns the manifest of the project
    pub fn manifest(&self) -> &Manifest {
        &self.manifest
    }

    /// Returns the cache directory of the project
    pub fn cache_dir(&self) -> &Path {
        &self.cache_path
    }

    /// Returns the path of the project
    pub fn path(&self) -> &Path {
        &self.path
    }

    /// Returns the registry auth token of the project
    pub fn registry_auth_token(&self) -> Option<&String> {
        self.registry_auth_token.as_ref()
    }

    /// Returns the lockfile of the project
    ///
    /// Returns `Ok(None)` when no lockfile exists yet.
    pub fn lockfile(&self) -> Result<Option<ResolvedVersionsMap>, ReadLockfileError> {
        let lockfile_path = self.path.join(LOCKFILE_FILE_NAME);

        Ok(if lockfile_path.exists() {
            let lockfile_contents = read(&lockfile_path)?;
            let lockfile: ResolvedVersionsMap = serde_yaml::from_slice(&lockfile_contents)
                .map_err(ReadLockfileError::LockfileDeser)?;

            Some(lockfile)
        } else {
            None
        })
    }

    /// Downloads the project's dependencies, applies patches, and links the dependencies
    pub fn install(&self, install_options: InstallOptions) -> Result<(), InstallProjectError> {
        // use the caller-provided map if any, otherwise resolve the tree now
        let map = match install_options.resolved_versions_map {
            Some(map) => map,
            None => self
                .manifest
                .dependency_tree(self, install_options.locked)?,
        };

        if install_options.auto_download {
            // download all packages and block until every worker reports back
            self.download(&map)?.wait()?;
        }

        self.apply_patches(&map)?;

        self.link_dependencies(&map)?;

        // a locked install must not rewrite the lockfile
        if !install_options.locked {
            serde_yaml::to_writer(File::create(self.path.join(LOCKFILE_FILE_NAME))?, &map)
                .map_err(InstallProjectError::LockfileSer)?;
        }

        Ok(())
    }
}

99
tests/prelude.rs Normal file
View file

@ -0,0 +1,99 @@
use std::{
collections::{BTreeSet, HashMap},
sync::Arc,
};
use pesde::{
index::{
ConfigError, CreatePackageVersionError, CredentialsFn, Index, IndexConfig, IndexFile,
IndexFileEntry, IndexPackageError, ScopeOwners, ScopeOwnersError,
},
manifest::Manifest,
package_name::PackageName,
};
/// An in-memory implementation of the [`Index`] trait. Used for testing.
#[derive(Debug, Clone, Default)]
pub struct InMemoryIndex {
    // scope name -> (set of owner user ids, the scope's index file)
    packages: HashMap<String, (BTreeSet<u64>, IndexFile)>,
}
impl InMemoryIndex {
    /// Creates an empty in-memory index.
    pub fn new() -> Self {
        Self::default()
    }

    /// Builder-style: registers `scope` with the given owner set and an empty
    /// index file, replacing any existing entry for that scope.
    pub fn with_scope(mut self, scope: &str, owners: BTreeSet<u64>) -> Self {
        let entry = (owners, IndexFile::default());
        self.packages.insert(scope.to_string(), entry);
        self
    }

    /// Builder-style: appends `index_file` to the scope's index file, creating
    /// the scope (with no owners) if it does not exist yet.
    pub fn with_package(mut self, scope: &str, index_file: IndexFileEntry) -> Self {
        let (_, file) = self
            .packages
            .entry(scope.to_string())
            .or_insert_with(|| (BTreeSet::new(), IndexFile::default()));
        file.push(index_file);
        self
    }
}
impl Index for InMemoryIndex {
    /// Returns the owner set for `scope`, or `None` if the scope is unknown.
    fn scope_owners(&self, scope: &str) -> Result<Option<ScopeOwners>, ScopeOwnersError> {
        let owners = self.packages.get(scope).map(|entry| entry.0.clone());
        Ok(owners)
    }

    /// Registers `scope` with the given owners and an empty index file.
    /// Always reports success.
    fn create_scope_for(
        &mut self,
        scope: &str,
        owners: &ScopeOwners,
    ) -> Result<bool, ScopeOwnersError> {
        let entry = (owners.clone(), IndexFile::default());
        self.packages.insert(scope.to_string(), entry);
        Ok(true)
    }

    /// Returns the index file for the package's scope, or `None` if unknown.
    fn package(&self, name: &PackageName) -> Result<Option<IndexFile>, IndexPackageError> {
        let file = self.packages.get(name.scope()).map(|entry| entry.1.clone());
        Ok(file)
    }

    /// Records a new package version, creating the scope owned by `uploader`
    /// if it does not exist; rejects uploaders who do not own the scope.
    fn create_package_version(
        &mut self,
        manifest: &Manifest,
        uploader: &u64,
    ) -> Result<bool, CreatePackageVersionError> {
        let scope = manifest.name.scope();

        match self.scope_owners(scope)? {
            Some(owners) if owners.contains(uploader) => {}
            Some(_) => return Err(CreatePackageVersionError::MissingScopeOwnership),
            None => {
                if !self.create_scope_for(scope, &BTreeSet::from([*uploader]))? {
                    return Err(CreatePackageVersionError::MissingScopeOwnership);
                }
            }
        }

        let package = self.packages.get_mut(scope).unwrap();
        package.1.push(manifest.clone().into());

        Ok(true)
    }

    /// Returns a fixed test configuration pointing at a local API endpoint.
    fn config(&self) -> Result<IndexConfig, ConfigError> {
        Ok(IndexConfig {
            download: None,
            api: "http://127.0.0.1:8080".to_string(),
            github_oauth_client_id: "".to_string(),
            custom_registry_allowed: false,
            git_allowed: false,
        })
    }

    /// The in-memory index never requires credentials.
    fn credentials_fn(&self) -> Option<&Arc<CredentialsFn>> {
        None
    }
}

118
tests/resolver.rs Normal file
View file

@ -0,0 +1,118 @@
use std::collections::BTreeSet;
use semver::Version;
use tempfile::tempdir;
use pesde::{
dependencies::{
registry::{RegistryDependencySpecifier, RegistryPackageRef},
resolution::ResolvedPackage,
DependencySpecifier, PackageRef,
},
manifest::{DependencyType, Manifest, Realm},
package_name::PackageName,
project::Project,
};
use prelude::*;
mod prelude;
#[test]
// Verifies that dependency resolution returns both matching versions of a
// package when a normal dependency pins `=0.1.0` and a peer dependency asks
// for `>0.1.0`, and that each resolved entry carries the right specifier,
// dependency type, realm and root flag.
fn test_resolves_package() {
    let dir = tempdir().unwrap();
    let dir_path = dir.path().to_path_buf();
    let index = InMemoryIndex::new();

    let version_str = "0.1.0";
    let version: Version = version_str.parse().unwrap();
    let version_2_str = "0.1.1";
    let version_2: Version = version_2_str.parse().unwrap();

    let description = "test package";

    let pkg_name = PackageName::new("test", "test").unwrap();

    // two versions of the same package are published to the index
    let pkg_manifest = Manifest {
        name: pkg_name.clone(),
        version: version.clone(),
        exports: Default::default(),
        path_style: Default::default(),
        private: true,
        realm: None,
        dependencies: vec![],
        peer_dependencies: vec![],
        description: Some(description.to_string()),
        license: None,
        authors: None,
    };

    let mut pkg_2_manifest = pkg_manifest.clone();
    pkg_2_manifest.version = version_2.clone();

    let index = index
        .with_scope(pkg_name.scope(), BTreeSet::from([0]))
        .with_package(pkg_name.scope(), pkg_manifest.into())
        .with_package(pkg_name.scope(), pkg_2_manifest.into());

    // normal dependency: exactly 0.1.0
    let specifier = DependencySpecifier::Registry(RegistryDependencySpecifier {
        name: pkg_name.clone(),
        version: format!("={version_str}").parse().unwrap(),
        realm: None,
    });
    // peer dependency: anything newer than 0.1.0 (matches 0.1.1)
    let specifier_2 = DependencySpecifier::Registry(RegistryDependencySpecifier {
        name: pkg_name.clone(),
        version: format!(">{version_str}").parse().unwrap(),
        realm: None,
    });

    let user_manifest = Manifest {
        name: "test/user".parse().unwrap(),
        version: version.clone(),
        exports: Default::default(),
        path_style: Default::default(),
        private: true,
        realm: None,
        dependencies: vec![specifier.clone()],
        peer_dependencies: vec![specifier_2.clone()],
        description: Some(description.to_string()),
        license: None,
        authors: None,
    };

    let project = Project::new(&dir_path, &dir_path, index, user_manifest, None);

    let tree = project.manifest().dependency_tree(&project, false).unwrap();
    // one package in the tree, resolved at two distinct versions
    assert_eq!(tree.len(), 1);

    let versions = tree.get(&pkg_name).unwrap();
    assert_eq!(versions.len(), 2);

    // the pinned (normal) dependency resolved to 0.1.0
    let resolved_pkg = versions.get(&version).unwrap();
    assert_eq!(
        resolved_pkg,
        &ResolvedPackage {
            pkg_ref: PackageRef::Registry(RegistryPackageRef {
                name: pkg_name.clone(),
                version: version.clone(),
            }),
            specifier,
            dependencies: Default::default(),
            is_root: true,
            realm: Realm::Shared,
            dep_type: DependencyType::Normal,
        }
    );

    // the range (peer) dependency resolved to 0.1.1
    let resolved_pkg_2 = versions.get(&version_2).unwrap();
    assert_eq!(
        resolved_pkg_2,
        &ResolvedPackage {
            pkg_ref: PackageRef::Registry(RegistryPackageRef {
                name: pkg_name.clone(),
                version: version_2.clone(),
            }),
            specifier: specifier_2,
            dependencies: Default::default(),
            is_root: true,
            realm: Realm::Shared,
            dep_type: DependencyType::Normal,
        }
    );
}