feat: begin rewrite

This commit is contained in:
daimond113 2024-07-13 00:09:37 +02:00
parent 88e87b03b9
commit b73bf418c5
No known key found for this signature in database
GPG key ID: 3A8ECE51328B513C
82 changed files with 3090 additions and 11134 deletions

4728
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,17 +1,20 @@
[package]
name = "pesde"
version = "0.4.7"
version = "0.5.0-dev.0"
edition = "2021"
license = "MIT"
authors = ["daimond113 <contact@daimond113.com>"]
description = "A package manager for Roblox"
description = "A package manager for Luau"
homepage = "https://pesde.daimond113.com"
repository = "https://github.com/daimond113/pesde"
include = ["src/**/*", "Cargo.toml", "Cargo.lock", "README.md", "LICENSE", "CHANGELOG.md"]
[features]
bin = ["clap", "directories", "keyring", "anyhow", "ignore", "pretty_env_logger", "reqwest/json", "reqwest/multipart", "lune", "futures-executor", "indicatif", "auth-git2", "indicatif-log-bridge", "inquire", "once_cell"]
wally = ["toml", "zip"]
bin = ["clap", "directories", "ignore", "pretty_env_logger", "reqwest/json", "reqwest/multipart", "indicatif", "indicatif-log-bridge", "inquire", "nondestructive", "colored", "anyhow", "keyring", "open"]
wally-compat = ["toml", "zip"]
roblox = []
lune = []
luau = []
[[bin]]
name = "pesde"
@ -19,51 +22,46 @@ path = "src/main.rs"
required-features = ["bin"]
[dependencies]
serde = { version = "1.0.197", features = ["derive"] }
serde_yaml = "0.9.33"
serde_json = "1.0.114"
git2 = "0.18.3"
semver = { version = "1.0.22", features = ["serde"] }
reqwest = { version = "0.12.1", default-features = false, features = ["rustls-tls", "blocking"] }
tar = "0.4.40"
flate2 = "1.0.28"
serde = { version = "1.0.204", features = ["derive"] }
serde_yaml = "0.9.34"
serde_json = "1.0.120"
serde_with = "3.8.3"
gix = { version = "0.63.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "serde"] }
semver = { version = "1.0.23", features = ["serde"] }
reqwest = { version = "0.12.5", default-features = false, features = ["rustls-tls", "blocking"] }
tar = "0.4.41"
flate2 = "1.0.30"
pathdiff = "0.2.1"
relative-path = { version = "1.9.2", features = ["serde"] }
log = "0.4.21"
thiserror = "1.0.58"
relative-path = { version = "1.9.3", features = ["serde"] }
log = "0.4.22"
thiserror = "1.0.62"
threadpool = "1.8.1"
full_moon = { version = "0.19.0", features = ["stacker", "roblox"] }
url = { version = "2.5.0", features = ["serde"] }
full_moon = { version = "1.0.0-rc.5", features = ["luau"] }
url = { version = "2.5.2", features = ["serde"] }
cfg-if = "1.0.0"
once_cell = "1.19.0"
secrecy = "0.8.0"
chrono = { version = "0.4.38", features = ["serde"] }
toml = { version = "0.8.12", optional = true }
zip = { version = "0.6.6", optional = true }
toml = { version = "0.8.14", optional = true }
zip = { version = "2.1.3", optional = true }
# chrono-lc breaks because of https://github.com/chronotope/chrono/compare/v0.4.34...v0.4.35#diff-67de5678fb5c14378bbff7ecf7f8bfab17cc223c4726f8da3afca183a4e59543
chrono = { version = "=0.4.34", features = ["serde"] }
clap = { version = "4.5.3", features = ["derive"], optional = true }
anyhow = { version = "1.0.86", optional = true }
open = { version = "5.3.0", optional = true }
keyring = { version = "3.0.1", features = ["crypto-rust", "windows-native", "apple-native", "linux-native"], optional = true }
colored = { version = "2.1.0", optional = true }
nondestructive = { version = "0.0.25", optional = true }
clap = { version = "4.5.9", features = ["derive"], optional = true }
directories = { version = "5.0.1", optional = true }
keyring = { version = "2.3.2", optional = true }
anyhow = { version = "1.0.81", optional = true }
ignore = { version = "0.4.22", optional = true }
pretty_env_logger = { version = "0.5.0", optional = true }
lune = { version = "0.8.2", optional = true }
futures-executor = { version = "0.3.30", optional = true }
indicatif = { version = "0.17.8", optional = true }
auth-git2 = { version = "0.5.4", optional = true }
indicatif-log-bridge = { version = "0.2.2", optional = true }
inquire = { version = "0.7.3", optional = true }
once_cell = { version = "1.19.0", optional = true }
[dev-dependencies]
tempfile = "3.10.1"
inquire = { version = "0.7.5", optional = true }
[workspace]
resolver = "2"
members = [
"registry"
]
members = []
[profile.dev.package.full_moon]
opt-level = 3
opt-level = 3

View file

@ -6,6 +6,10 @@
<br>
# Important
> pesde is currently being rewritten, and this new version is not yet ready for use. You can find the stable version in the `master` branch.
pesde is a package manager for Roblox that is designed to be feature-rich and easy to use.
Currently, pesde is in a very early stage of development, but already supports the following features:
@ -17,7 +21,8 @@ Currently, pesde is in a very early stage of development, but already supports t
## Installation
pesde can be installed from GitHub Releases. You can find the latest release [here](https://github.com/daimond113/pesde/releases).
pesde can be installed from GitHub Releases. You can find the latest
release [here](https://github.com/daimond113/pesde/releases).
It can also be installed by using [Aftman](https://github.com/LPGhatguy/aftman).
## Usage
@ -52,13 +57,16 @@ pesde run daimond113/pesde -- --help
## Preparing to publish
To publish you must first initialize a new project with `pesde init`. You can then use the other commands to manipulate dependencies, and edit the file
To publish you must first initialize a new project with `pesde init`. You can then use the other commands to manipulate
dependencies, and edit the file
manually to add metadata such as authors, description, and license.
> **Warning**
> The pesde CLI respects the `.gitignore` file and will not include files that are ignored. The `.pesdeignore` file has more power over the `.gitignore` file, so you can unignore files by prepending a `!` to the pattern.
> The pesde CLI respects the `.gitignore` file and will not include files that are ignored. The `.pesdeignore` file has
> more power over the `.gitignore` file, so you can unignore files by prepending a `!` to the pattern.
The pesde CLI supports the `.pesdeignore` file, which is similar to `.gitignore`. It can be used to include or exclude files from the package.
The pesde CLI supports the `.pesdeignore` file, which is similar to `.gitignore`. It can be used to include or exclude
files from the package.
## Documentation
@ -70,9 +78,11 @@ The main pesde registry is hosted on [fly.io](https://fly.io). You can find it a
### Self-hosting
You can self-host the registry by using the default implementation in the `registry` folder, or by creating your own implementation. The API
You can self-host the registry by using the default implementation in the `registry` folder, or by creating your own
implementation. The API
must be compatible with the default implementation, which can be found in the `main.rs` file.
## Previous art
pesde is heavily inspired by [npm](https://www.npmjs.com/), [pnpm](https://pnpm.io/), [Wally](https://wally.run), and [Cargo](https://doc.rust-lang.org/cargo/).
pesde is heavily inspired by [npm](https://www.npmjs.com/), [pnpm](https://pnpm.io/), [Wally](https://wally.run),
and [Cargo](https://doc.rust-lang.org/cargo/).

View file

@ -1,11 +0,0 @@
INDEX_REPO_URL=# url of the git repository to be used as the package index
S3_ENDPOINT=# endpoint of the s3 bucket
S3_BUCKET_NAME=# name of the s3 bucket
S3_REGION=# region of the s3 bucket
S3_ACCESS_KEY=# access key of the s3 bucket
S3_SECRET_KEY=# secret key of the s3 bucket
COMMITTER_GIT_NAME=# name of the committer used for index updates
COMMITTER_GIT_EMAIL=# email of the committer used for index updates
GITHUB_USERNAME=# username of github account with push access to the index repository
GITHUB_PAT=# personal access token of github account with push access to the index repository
SENTRY_URL=# optional url of sentry error tracking

View file

@ -1,33 +0,0 @@
[package]
name = "pesde-registry"
version = "0.6.1"
edition = "2021"
[dependencies]
actix-web = "4.5.1"
actix-cors = "0.7.0"
actix-web-httpauth = "0.8.1"
actix-multipart = "0.6.1"
actix-multipart-derive = "0.6.1"
actix-governor = "0.5.0"
dotenvy = "0.15.7"
reqwest = { version = "0.12.1", features = ["json", "blocking"] }
rusty-s3 = "0.5.0"
serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.114"
serde_yaml = "0.9.33"
flate2 = "1.0.28"
tar = "0.4.40"
pesde = { path = ".." }
semver = "1.0.22"
git2 = "0.18.3"
thiserror = "1.0.58"
tantivy = "0.21.1"
log = "0.4.21"
pretty_env_logger = "0.5.0"
sentry = "0.32.2"
sentry-log = "0.32.2"
sentry-actix = "0.32.2"
# zstd-sys v2.0.10 is broken: https://github.com/gyscos/zstd-rs/issues/268
zstd-sys = "=2.0.9"

View file

@ -1,2 +0,0 @@
pub mod packages;
pub mod search;

View file

@ -1,256 +0,0 @@
use actix_multipart::form::{bytes::Bytes, MultipartForm};
use actix_web::{web, HttpResponse, Responder};
use flate2::read::GzDecoder;
use log::error;
use reqwest::StatusCode;
use rusty_s3::S3Action;
use tantivy::{doc, DateTime, Term};
use tar::Archive;
use pesde::{
dependencies::DependencySpecifier, index::Index, manifest::Manifest,
package_name::StandardPackageName, project::DEFAULT_INDEX_NAME, IGNORED_FOLDERS,
MANIFEST_FILE_NAME,
};
use crate::{commit_signature, errors, AppState, UserId, S3_EXPIRY};
#[derive(MultipartForm)]
pub struct CreateForm {
#[multipart(limit = "4 MiB")]
tarball: Bytes,
}
pub async fn create_package(
form: MultipartForm<CreateForm>,
app_state: web::Data<AppState>,
user_id: web::ReqData<UserId>,
) -> Result<impl Responder, errors::Errors> {
let bytes = form.tarball.data.as_ref().to_vec();
let mut decoder = GzDecoder::new(bytes.as_slice());
let mut archive = Archive::new(&mut decoder);
let archive_entries = archive.entries()?.filter_map(|e| e.ok());
let mut manifest = None;
for mut e in archive_entries {
let Ok(path) = e.path() else {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: "Attached file contains non-UTF-8 path".to_string(),
}));
};
let Some(path) = path.as_os_str().to_str() else {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: "Attached file contains non-UTF-8 path".to_string(),
}));
};
match path {
MANIFEST_FILE_NAME => {
if !e.header().entry_type().is_file() {
continue;
}
let received_manifest: Manifest =
serde_yaml::from_reader(&mut e).map_err(errors::Errors::UserYaml)?;
manifest = Some(received_manifest);
}
path => {
if e.header().entry_type().is_file() {
continue;
}
if IGNORED_FOLDERS.contains(&path) {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: format!("Attached file contains forbidden directory {}", path),
}));
}
}
}
}
let Some(manifest) = manifest else {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: format!("Attached file doesn't contain {MANIFEST_FILE_NAME}"),
}));
};
let (scope, name) = manifest.name.parts();
let entry = {
let mut index = app_state.index.lock().unwrap();
let config = index.config()?;
for (dependency, _) in manifest.dependencies().into_values() {
match dependency {
DependencySpecifier::Git(_) => {
if !config.git_allowed {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: "Git dependencies are not allowed on this registry".to_string(),
}));
}
}
DependencySpecifier::Registry(registry) => {
if index
.package(&registry.name.clone().into())
.unwrap()
.is_none()
{
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: format!("Dependency {} not found", registry.name),
}));
}
if registry.index != DEFAULT_INDEX_NAME && !config.custom_registry_allowed {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: "Custom registries are not allowed on this registry".to_string(),
}));
}
}
#[allow(unreachable_patterns)]
_ => {}
};
}
match index.create_package_version(&manifest, &user_id.0)? {
Some(entry) => {
index.commit_and_push(
&format!("Add version {}@{}", manifest.name, manifest.version),
&commit_signature(),
)?;
entry
}
None => {
return Ok(HttpResponse::BadRequest().json(errors::ErrorResponse {
error: format!(
"Version {} of {} already exists",
manifest.version, manifest.name
),
}));
}
}
};
{
let mut search_writer = app_state.search_writer.lock().unwrap();
let schema = search_writer.index().schema();
let name_field = schema.get_field("name").unwrap();
search_writer.delete_term(Term::from_field_text(
name_field,
&manifest.name.to_string(),
));
search_writer.add_document(
doc!(
name_field => manifest.name.to_string(),
schema.get_field("version").unwrap() => manifest.version.to_string(),
schema.get_field("description").unwrap() => manifest.description.unwrap_or_default(),
schema.get_field("published_at").unwrap() => DateTime::from_timestamp_secs(entry.published_at.timestamp())
)
).unwrap();
search_writer.commit().unwrap();
}
let url = app_state
.s3_bucket
.put_object(
Some(&app_state.s3_credentials),
&format!("{scope}-{name}-{}.tar.gz", manifest.version),
)
.sign(S3_EXPIRY);
app_state.reqwest_client.put(url).body(bytes).send().await?;
Ok(HttpResponse::Ok().body(format!(
"Successfully published {}@{}",
manifest.name, manifest.version
)))
}
pub async fn get_package_version(
app_state: web::Data<AppState>,
path: web::Path<(String, String, String)>,
) -> Result<impl Responder, errors::Errors> {
let (scope, name, mut version) = path.into_inner();
let package_name = StandardPackageName::new(&scope, &name)?;
{
let index = app_state.index.lock().unwrap();
match index.package(&package_name.clone().into())? {
Some(package) => {
if version == "latest" {
version = package.last().map(|v| v.version.to_string()).unwrap();
} else if !package.iter().any(|v| v.version.to_string() == version) {
return Ok(HttpResponse::NotFound().finish());
}
}
None => return Ok(HttpResponse::NotFound().finish()),
}
}
let url = app_state
.s3_bucket
.get_object(
Some(&app_state.s3_credentials),
&format!("{scope}-{name}-{version}.tar.gz"),
)
.sign(S3_EXPIRY);
let response = match app_state
.reqwest_client
.get(url)
.send()
.await?
.error_for_status()
{
Ok(response) => response,
Err(e) => {
if let Some(status) = e.status() {
if status == StatusCode::NOT_FOUND {
error!(
"package {}@{} not found in S3, but found in index",
package_name, version
);
return Ok(HttpResponse::InternalServerError().finish());
}
}
return Err(e.into());
}
};
Ok(HttpResponse::Ok().body(response.bytes().await?))
}
pub async fn get_package_versions(
app_state: web::Data<AppState>,
path: web::Path<(String, String)>,
) -> Result<impl Responder, errors::Errors> {
let (scope, name) = path.into_inner();
let package_name = StandardPackageName::new(&scope, &name)?;
{
let index = app_state.index.lock().unwrap();
match index.package(&package_name.into())? {
Some(package) => {
let versions = package
.iter()
.map(|v| (v.version.to_string(), v.published_at.timestamp()))
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(versions))
}
None => Ok(HttpResponse::NotFound().finish()),
}
}
}

View file

@ -1,81 +0,0 @@
use actix_web::{web, Responder};
use semver::Version;
use serde::Deserialize;
use serde_json::{json, Value};
use tantivy::{query::AllQuery, DateTime, DocAddress, Order};
use pesde::{index::Index, package_name::StandardPackageName};
use crate::{errors, AppState};
#[derive(Deserialize)]
pub struct Query {
query: Option<String>,
}
pub async fn search_packages(
app_state: web::Data<AppState>,
query: web::Query<Query>,
) -> Result<impl Responder, errors::Errors> {
let searcher = app_state.search_reader.searcher();
let schema = searcher.schema();
let name = schema.get_field("name").unwrap();
let version = schema.get_field("version").unwrap();
let description = schema.get_field("description").unwrap();
let query = query.query.as_deref().unwrap_or_default().trim();
let query_parser =
tantivy::query::QueryParser::for_index(searcher.index(), vec![name, description]);
let query = if query.is_empty() {
Box::new(AllQuery)
} else {
query_parser.parse_query(query)?
};
let top_docs: Vec<(DateTime, DocAddress)> = searcher
.search(
&query,
&tantivy::collector::TopDocs::with_limit(10)
.order_by_fast_field("published_at", Order::Desc),
)
.unwrap();
{
let index = app_state.index.lock().unwrap();
Ok(web::Json(
top_docs
.into_iter()
.map(|(published_at, doc_address)| {
let retrieved_doc = searcher.doc(doc_address).unwrap();
let name: StandardPackageName = retrieved_doc
.get_first(name)
.and_then(|v| v.as_text())
.and_then(|v| v.parse().ok())
.unwrap();
let version: Version = retrieved_doc
.get_first(version)
.and_then(|v| v.as_text())
.and_then(|v| v.parse().ok())
.unwrap();
let entry = index
.package(&name.clone().into())
.unwrap()
.and_then(|v| v.into_iter().find(|v| v.version == version))
.unwrap();
json!({
"name": name,
"version": version,
"description": entry.description,
"published_at": published_at.into_timestamp_secs(),
})
})
.collect::<Vec<Value>>(),
))
}
}

View file

@ -1,77 +0,0 @@
use actix_web::{HttpResponse, ResponseError};
use log::error;
use pesde::index::CreatePackageVersionError;
use serde::Serialize;
use thiserror::Error;
#[derive(Serialize)]
pub struct ErrorResponse {
pub error: String,
}
#[derive(Debug, Error)]
pub enum Errors {
#[error("io error")]
Io(#[from] std::io::Error),
#[error("user yaml error")]
UserYaml(serde_yaml::Error),
#[error("reqwest error")]
Reqwest(#[from] reqwest::Error),
#[error("package name invalid")]
PackageName(#[from] pesde::package_name::StandardPackageNameValidationError),
#[error("config error")]
Config(#[from] pesde::index::ConfigError),
#[error("create package version error")]
CreatePackageVersion(#[from] CreatePackageVersionError),
#[error("commit and push error")]
CommitAndPush(#[from] pesde::index::CommitAndPushError),
#[error("index package error")]
IndexPackage(#[from] pesde::index::IndexPackageError),
#[error("error parsing query")]
QueryParser(#[from] tantivy::query::QueryParserError),
}
impl ResponseError for Errors {
fn error_response(&self) -> HttpResponse {
match self {
Errors::UserYaml(_) | Errors::PackageName(_) | Errors::QueryParser(_) => {}
Errors::CreatePackageVersion(err) => match err {
CreatePackageVersionError::MissingScopeOwnership => {
return HttpResponse::Unauthorized().json(ErrorResponse {
error: "You do not have permission to publish this scope".to_string(),
});
}
CreatePackageVersionError::FromManifestIndexFileEntry(err) => {
return HttpResponse::BadRequest().json(ErrorResponse {
error: format!("Error in manifest: {err:?}"),
});
}
_ => error!("{err:?}"),
},
err => {
error!("{err:?}");
}
}
match self {
Errors::UserYaml(err) => HttpResponse::BadRequest().json(ErrorResponse {
error: format!("Error parsing YAML file: {err}"),
}),
Errors::PackageName(err) => HttpResponse::BadRequest().json(ErrorResponse {
error: format!("Invalid package name: {err}"),
}),
Errors::QueryParser(err) => HttpResponse::BadRequest().json(ErrorResponse {
error: format!("Error parsing query: {err}"),
}),
_ => HttpResponse::InternalServerError().finish(),
}
}
}

View file

@ -1,316 +0,0 @@
use std::{fs::read_dir, sync::Mutex, time::Duration};
use actix_cors::Cors;
use actix_governor::{Governor, GovernorConfigBuilder, KeyExtractor, SimpleKeyExtractionError};
use actix_web::{
dev::ServiceRequest,
error::ErrorUnauthorized,
middleware::{Compress, Condition, Logger},
rt::System,
web, App, Error, HttpMessage, HttpServer,
};
use actix_web_httpauth::{extractors::bearer::BearerAuth, middleware::HttpAuthentication};
use dotenvy::dotenv;
use git2::{Cred, Signature};
use log::info;
use reqwest::{header::AUTHORIZATION, Client};
use rusty_s3::{Bucket, Credentials, UrlStyle};
use tantivy::{doc, DateTime, IndexReader, IndexWriter};
use pesde::{
index::{GitIndex, Index, IndexFile},
package_name::StandardPackageName,
};
mod endpoints;
mod errors;
const S3_EXPIRY: Duration = Duration::from_secs(60 * 60);
struct AppState {
s3_bucket: Bucket,
s3_credentials: Credentials,
reqwest_client: Client,
index: Mutex<GitIndex>,
search_reader: IndexReader,
search_writer: Mutex<IndexWriter>,
}
macro_rules! get_env {
($name:expr, "p") => {
std::env::var($name)
.expect(concat!("Environment variable `", $name, "` must be set"))
.parse()
.expect(concat!(
"Environment variable `",
$name,
"` must be a valid value"
))
};
($name:expr) => {
std::env::var($name).expect(concat!("Environment variable `", $name, "` must be set"))
};
($name:expr, $default:expr, "p") => {
std::env::var($name)
.unwrap_or($default.to_string())
.parse()
.expect(concat!(
"Environment variable `",
$name,
"` must a valid value"
))
};
($name:expr, $default:expr) => {
std::env::var($name).unwrap_or($default.to_string())
};
}
pub fn commit_signature<'a>() -> Signature<'a> {
Signature::now(
&get_env!("COMMITTER_GIT_NAME"),
&get_env!("COMMITTER_GIT_EMAIL"),
)
.unwrap()
}
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
pub struct UserId(pub u64);
async fn validator(
req: ServiceRequest,
credentials: BearerAuth,
) -> Result<ServiceRequest, (Error, ServiceRequest)> {
let token = credentials.token();
let app_state = req.app_data::<web::Data<AppState>>().unwrap();
let Ok(user_info) = app_state
.reqwest_client
.get("https://api.github.com/user")
.header(AUTHORIZATION, format!("Bearer {}", token))
.send()
.await
.map(|r| r.json::<serde_json::Value>())
else {
return Err((ErrorUnauthorized("Failed to fetch user info"), req));
};
let Ok(user_info) = user_info.await else {
return Err((ErrorUnauthorized("Failed to parse user info"), req));
};
let Some(id) = user_info["id"].as_u64() else {
return Err((ErrorUnauthorized("Failed to fetch user info"), req));
};
req.extensions_mut().insert(UserId(id));
Ok(req)
}
#[derive(Debug, Clone)]
struct UserIdKey;
impl KeyExtractor for UserIdKey {
type Key = UserId;
type KeyExtractionError = SimpleKeyExtractionError<&'static str>;
fn extract(&self, req: &ServiceRequest) -> Result<Self::Key, Self::KeyExtractionError> {
Ok(*req.extensions().get::<UserId>().unwrap())
}
}
fn search_index(index: &GitIndex) -> (IndexReader, IndexWriter) {
let mut schema_builder = tantivy::schema::SchemaBuilder::new();
let name =
schema_builder.add_text_field("name", tantivy::schema::TEXT | tantivy::schema::STORED);
let version =
schema_builder.add_text_field("version", tantivy::schema::TEXT | tantivy::schema::STORED);
let description = schema_builder.add_text_field("description", tantivy::schema::TEXT);
let published_at = schema_builder.add_date_field("published_at", tantivy::schema::FAST);
let search_index = tantivy::Index::create_in_ram(schema_builder.build());
let search_reader = search_index
.reader_builder()
.reload_policy(tantivy::ReloadPolicy::OnCommit)
.try_into()
.unwrap();
let mut search_writer = search_index.writer(50_000_000).unwrap();
for entry in read_dir(index.path()).unwrap() {
let entry = entry.unwrap();
let path = entry.path();
if !path.is_dir() || path.file_name().is_some_and(|v| v == ".git") {
continue;
}
let scope = path.file_name().and_then(|v| v.to_str()).unwrap();
for entry in read_dir(&path).unwrap() {
let entry = entry.unwrap();
let path = entry.path();
if !path.is_file() || path.extension().is_some() {
continue;
}
let package = path.file_name().and_then(|v| v.to_str()).unwrap();
let package_name = StandardPackageName::new(scope, package).unwrap();
let entries: IndexFile =
serde_yaml::from_slice(&std::fs::read(&path).unwrap()).unwrap();
let entry = entries.last().unwrap().clone();
search_writer
.add_document(doc!(
name => package_name.to_string(),
version => entry.version.to_string(),
description => entry.description.unwrap_or_default(),
published_at => DateTime::from_timestamp_secs(entry.published_at.timestamp()),
))
.unwrap();
}
}
search_writer.commit().unwrap();
(search_reader, search_writer)
}
fn main() -> std::io::Result<()> {
dotenv().ok();
let sentry_url = std::env::var("SENTRY_URL").ok();
let with_sentry = sentry_url.is_some();
let mut log_builder = pretty_env_logger::formatted_builder();
log_builder.parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"));
if with_sentry {
let logger = sentry_log::SentryLogger::with_dest(log_builder.build());
log::set_boxed_logger(Box::new(logger)).unwrap();
log::set_max_level(log::LevelFilter::Info);
} else {
log_builder.try_init().unwrap();
}
let _guard = if let Some(sentry_url) = sentry_url {
std::env::set_var("RUST_BACKTRACE", "1");
Some(sentry::init((
sentry_url,
sentry::ClientOptions {
release: sentry::release_name!(),
..Default::default()
},
)))
} else {
None
};
let address = get_env!("ADDRESS", "127.0.0.1");
let port: u16 = get_env!("PORT", "8080", "p");
let current_dir = std::env::current_dir().unwrap();
let index = GitIndex::new(
current_dir.join("cache"),
&get_env!("INDEX_REPO_URL", "p"),
Some(Box::new(|| {
Box::new(|_, _, _| {
let username = get_env!("GITHUB_USERNAME");
let pat = get_env!("GITHUB_PAT");
Cred::userpass_plaintext(&username, &pat)
})
})),
None,
);
index.refresh().expect("failed to refresh index");
let (search_reader, search_writer) = search_index(&index);
let app_data = web::Data::new(AppState {
s3_bucket: Bucket::new(
get_env!("S3_ENDPOINT", "p"),
UrlStyle::Path,
get_env!("S3_BUCKET_NAME"),
get_env!("S3_REGION"),
)
.unwrap(),
s3_credentials: Credentials::new(get_env!("S3_ACCESS_KEY"), get_env!("S3_SECRET_KEY")),
reqwest_client: Client::builder()
.user_agent(concat!(
env!("CARGO_PKG_NAME"),
"/",
env!("CARGO_PKG_VERSION")
))
.build()
.unwrap(),
index: Mutex::new(index),
search_reader,
search_writer: Mutex::new(search_writer),
});
let upload_governor_config = GovernorConfigBuilder::default()
.burst_size(10)
.per_second(600)
.key_extractor(UserIdKey)
.use_headers()
.finish()
.unwrap();
let generic_governor_config = GovernorConfigBuilder::default()
.burst_size(50)
.per_second(10)
.use_headers()
.finish()
.unwrap();
info!("listening on {address}:{port}");
System::new().block_on(async move {
HttpServer::new(move || {
App::new()
.wrap(Condition::new(with_sentry, sentry_actix::Sentry::new()))
.wrap(Logger::default())
.wrap(Cors::permissive())
.wrap(Compress::default())
.app_data(app_data.clone())
.route("/", web::get().to(|| async { env!("CARGO_PKG_VERSION") }))
.service(
web::scope("/v0")
.route(
"/search",
web::get()
.to(endpoints::search::search_packages)
.wrap(Governor::new(&generic_governor_config)),
)
.route(
"/packages/{scope}/{name}/versions",
web::get()
.to(endpoints::packages::get_package_versions)
.wrap(Governor::new(&generic_governor_config)),
)
.route(
"/packages/{scope}/{name}/{version}",
web::get()
.to(endpoints::packages::get_package_version)
.wrap(Governor::new(&generic_governor_config)),
)
.route(
"/packages",
web::post()
.to(endpoints::packages::create_package)
.wrap(Governor::new(&upload_governor_config))
.wrap(HttpAuthentication::bearer(validator)),
),
)
})
.bind((address, port))?
.run()
.await
})
}

View file

@ -1,162 +0,0 @@
use std::path::PathBuf;
use crate::cli::DEFAULT_INDEX_DATA;
use keyring::Entry;
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
struct EnvVarApiTokenSource;
const API_TOKEN_ENV_VAR: &str = "PESDE_API_TOKEN";
impl EnvVarApiTokenSource {
fn get_api_token(&self) -> anyhow::Result<Option<String>> {
match std::env::var(API_TOKEN_ENV_VAR) {
Ok(token) => Ok(Some(token)),
Err(std::env::VarError::NotPresent) => Ok(None),
Err(e) => Err(e.into()),
}
}
}
static AUTH_FILE_PATH: Lazy<PathBuf> =
Lazy::new(|| DEFAULT_INDEX_DATA.0.parent().unwrap().join("auth.yaml"));
static AUTH_FILE: Lazy<AuthFile> =
Lazy::new(
|| match std::fs::read_to_string(AUTH_FILE_PATH.to_path_buf()) {
Ok(config) => serde_yaml::from_str(&config).unwrap(),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => AuthFile::default(),
Err(e) => panic!("{:?}", e),
},
);
#[derive(Serialize, Deserialize, Default, Clone)]
struct AuthFile {
#[serde(default)]
api_token: Option<String>,
}
struct ConfigFileApiTokenSource;
impl ConfigFileApiTokenSource {
fn get_api_token(&self) -> anyhow::Result<Option<String>> {
Ok(AUTH_FILE.api_token.clone())
}
fn set_api_token(&self, api_token: &str) -> anyhow::Result<()> {
let mut config = AUTH_FILE.clone();
config.api_token = Some(api_token.to_string());
serde_yaml::to_writer(
&mut std::fs::File::create(AUTH_FILE_PATH.to_path_buf())?,
&config,
)?;
Ok(())
}
fn delete_api_token(&self) -> anyhow::Result<()> {
let mut config = AUTH_FILE.clone();
config.api_token = None;
serde_yaml::to_writer(
&mut std::fs::File::create(AUTH_FILE_PATH.to_path_buf())?,
&config,
)?;
Ok(())
}
}
static KEYRING_ENTRY: Lazy<Entry> =
Lazy::new(|| Entry::new(env!("CARGO_PKG_NAME"), "api_token").unwrap());
struct KeyringApiTokenSource;
impl KeyringApiTokenSource {
fn get_api_token(&self) -> anyhow::Result<Option<String>> {
match KEYRING_ENTRY.get_password() {
Ok(api_token) => Ok(Some(api_token)),
Err(err) => match err {
keyring::Error::NoEntry | keyring::Error::PlatformFailure(_) => Ok(None),
_ => Err(err.into()),
},
}
}
fn set_api_token(&self, api_token: &str) -> anyhow::Result<()> {
KEYRING_ENTRY.set_password(api_token)?;
Ok(())
}
fn delete_api_token(&self) -> anyhow::Result<()> {
KEYRING_ENTRY.delete_password()?;
Ok(())
}
}
#[derive(Debug)]
pub enum ApiTokenSource {
EnvVar,
ConfigFile,
Keyring,
}
impl ApiTokenSource {
pub fn get_api_token(&self) -> anyhow::Result<Option<String>> {
match self {
ApiTokenSource::EnvVar => EnvVarApiTokenSource.get_api_token(),
ApiTokenSource::ConfigFile => ConfigFileApiTokenSource.get_api_token(),
ApiTokenSource::Keyring => KeyringApiTokenSource.get_api_token(),
}
}
pub fn set_api_token(&self, api_token: &str) -> anyhow::Result<()> {
match self {
ApiTokenSource::EnvVar => Ok(()),
ApiTokenSource::ConfigFile => ConfigFileApiTokenSource.set_api_token(api_token),
ApiTokenSource::Keyring => KeyringApiTokenSource.set_api_token(api_token),
}
}
pub fn delete_api_token(&self) -> anyhow::Result<()> {
match self {
ApiTokenSource::EnvVar => Ok(()),
ApiTokenSource::ConfigFile => ConfigFileApiTokenSource.delete_api_token(),
ApiTokenSource::Keyring => KeyringApiTokenSource.delete_api_token(),
}
}
fn persists(&self) -> bool {
!matches!(self, ApiTokenSource::EnvVar)
}
}
pub static API_TOKEN_SOURCE: Lazy<ApiTokenSource> = Lazy::new(|| {
let sources: [ApiTokenSource; 3] = [
ApiTokenSource::EnvVar,
ApiTokenSource::ConfigFile,
ApiTokenSource::Keyring,
];
let mut valid_sources = vec![];
for source in sources {
match source.get_api_token() {
Ok(Some(_)) => return source,
Ok(None) => {
if source.persists() {
valid_sources.push(source);
}
}
Err(e) => {
log::error!("error getting api token: {e}");
}
}
}
valid_sources.pop().unwrap()
});

View file

@ -1,111 +0,0 @@
use clap::Subcommand;
use pesde::index::Index;
use reqwest::{header::AUTHORIZATION, Url};
use crate::cli::{api_token::API_TOKEN_SOURCE, send_request, DEFAULT_INDEX, REQWEST_CLIENT};
#[derive(Subcommand, Clone)]
pub enum AuthCommand {
/// Logs in to the registry
Login,
/// Logs out from the registry
Logout,
}
pub fn auth_command(cmd: AuthCommand) -> anyhow::Result<()> {
match cmd {
AuthCommand::Login => {
let github_oauth_client_id = DEFAULT_INDEX.config()?.github_oauth_client_id;
let response = send_request(REQWEST_CLIENT.post(Url::parse_with_params(
"https://github.com/login/device/code",
&[("client_id", &github_oauth_client_id)],
)?))?
.json::<serde_json::Value>()?;
println!(
"go to {} and enter the code `{}`",
response["verification_uri"], response["user_code"]
);
let mut time_left = response["expires_in"]
.as_i64()
.ok_or(anyhow::anyhow!("couldn't get expires_in"))?;
let interval = std::time::Duration::from_secs(
response["interval"]
.as_u64()
.ok_or(anyhow::anyhow!("couldn't get interval"))?,
);
let device_code = response["device_code"]
.as_str()
.ok_or(anyhow::anyhow!("couldn't get device_code"))?;
while time_left > 0 {
std::thread::sleep(interval);
time_left -= interval.as_secs() as i64;
let response = send_request(REQWEST_CLIENT.post(Url::parse_with_params(
"https://github.com/login/oauth/access_token",
&[
("client_id", github_oauth_client_id.as_str()),
("device_code", device_code),
("grant_type", "urn:ietf:params:oauth:grant-type:device_code"),
],
)?))?
.json::<serde_json::Value>()?;
match response
.get("error")
.map(|s| {
s.as_str()
.ok_or(anyhow::anyhow!("couldn't get error as string"))
})
.unwrap_or(Ok(""))?
{
"authorization_pending" => continue,
"slow_down" => {
std::thread::sleep(std::time::Duration::from_secs(5));
continue;
}
"expired_token" => {
break;
}
"access_denied" => {
anyhow::bail!("access denied, re-run the login command");
}
_ => (),
}
if response.get("access_token").is_some() {
let access_token = response["access_token"]
.as_str()
.ok_or(anyhow::anyhow!("couldn't get access_token"))?;
API_TOKEN_SOURCE.set_api_token(access_token)?;
let response = send_request(
REQWEST_CLIENT
.get("https://api.github.com/user")
.header(AUTHORIZATION, format!("Bearer {access_token}")),
)?
.json::<serde_json::Value>()?;
let login = response["login"]
.as_str()
.ok_or(anyhow::anyhow!("couldn't get login"))?;
println!("you're now logged in as {login}");
return Ok(());
}
}
anyhow::bail!("code expired, please re-run the login command");
}
AuthCommand::Logout => {
API_TOKEN_SOURCE.delete_api_token()?;
println!("you're now logged out");
}
}
Ok(())
}

181
src/cli/auth/login.rs Normal file
View file

@ -0,0 +1,181 @@
use crate::cli::{auth::get_token_login, read_config, reqwest_client, set_token};
use anyhow::Context;
use clap::Args;
use colored::Colorize;
use pesde::{
errors::ManifestReadError,
source::{pesde::PesdePackageSource, PackageSource},
Project,
};
use serde::Deserialize;
use url::Url;
/// Arguments for `pesde auth login`: performs a GitHub device-flow login
/// against the OAuth app configured in the chosen index.
#[derive(Debug, Args)]
pub struct LoginCommand {
    /// The index to use. Defaults to `default`, or the configured default index if current directory doesn't have a manifest
    #[arg(short, long)]
    index: Option<String>,
}
/// Response body of GitHub's device-code endpoint
/// (`POST https://github.com/login/device/code`).
#[derive(Debug, Deserialize)]
struct DeviceCodeResponse {
    /// Code we present when polling the access-token endpoint.
    device_code: String,
    /// Code the user types into the verification page.
    user_code: String,
    /// Page the user must open to enter `user_code`.
    verification_uri: Url,
    /// Seconds until `device_code` expires.
    expires_in: u64,
    /// Minimum polling interval, in seconds.
    interval: u64,
}
/// Error variants of the access-token endpoint, discriminated by the
/// `error` field of the JSON body.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case", tag = "error")]
enum AccessTokenError {
    /// The user hasn't entered the code yet; keep polling.
    AuthorizationPending,
    /// We're polling too fast; GitHub supplies a new interval (seconds).
    SlowDown { interval: u64 },
    /// The device code expired before the user authorized it.
    ExpiredToken,
    /// The user rejected the authorization request.
    AccessDenied,
}
/// Access-token endpoint response: either a token or an error object.
/// `untagged` tries variants in declaration order, so a body containing
/// `access_token` parses as `Success` before `Error` is attempted.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum AccessTokenResponse {
    Success { access_token: String },
    Error(AccessTokenError),
}
impl LoginCommand {
pub fn run(self, project: Project) -> anyhow::Result<()> {
let manifest = match project.deser_manifest() {
Ok(manifest) => Some(manifest),
Err(e) => match e {
ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None,
e => return Err(e.into()),
},
};
let index_url = match &self.index {
Some(index) => match index.parse() {
Ok(url) => Some(url),
Err(_) => None,
},
None => match manifest {
Some(_) => None,
None => Some(read_config(project.data_dir())?.default_index),
},
};
let index_url = match index_url {
Some(url) => url,
None => {
let index_name = self.index.as_deref().unwrap_or("default");
match manifest.unwrap().indices.get(index_name) {
Some(index) => index.clone(),
None => anyhow::bail!("Index {index_name} not found"),
}
}
};
let source = PesdePackageSource::new(
index_url
.as_str()
.try_into()
.context("cannot parse URL to git URL")?,
);
source
.refresh(&project)
.context("failed to refresh index")?;
dbg!(source.all_packages(&project).unwrap());
let config = source
.config(&project)
.context("failed to read index config")?;
let client_id = config.github_oauth_client_id;
let reqwest = reqwest_client(project.data_dir())?;
let response = reqwest
.post(Url::parse_with_params(
"https://github.com/login/device/code",
&[("client_id", &client_id)],
)?)
.send()
.context("failed to send device code request")?
.json::<DeviceCodeResponse>()
.context("failed to parse device code response")?;
println!(
"copy your one-time code: {}\npress enter to open {} in your browser...",
response.user_code.bold(),
response.verification_uri.as_str().blue()
);
{
let mut input = String::new();
std::io::stdin()
.read_line(&mut input)
.context("failed to read input")?;
}
match open::that(response.verification_uri.as_str()) {
Ok(_) => (),
Err(e) => {
eprintln!("failed to open browser: {e}");
}
}
let mut time_left = response.expires_in;
let mut interval = std::time::Duration::from_secs(response.interval);
while time_left > 0 {
std::thread::sleep(interval);
time_left = time_left.saturating_sub(interval.as_secs());
let response = reqwest
.post(Url::parse_with_params(
"https://github.com/login/oauth/access_token",
&[
("client_id", &client_id),
("device_code", &response.device_code),
(
"grant_type",
&"urn:ietf:params:oauth:grant-type:device_code".to_string(),
),
],
)?)
.send()
.context("failed to send access token request")?
.json::<AccessTokenResponse>()
.context("failed to parse access token response")?;
match response {
AccessTokenResponse::Success { access_token } => {
set_token(project.data_dir(), Some(&access_token))?;
println!(
"logged in as {}",
get_token_login(&reqwest, &access_token)?.bold()
);
return Ok(());
}
AccessTokenResponse::Error(e) => match e {
AccessTokenError::AuthorizationPending => continue,
AccessTokenError::SlowDown {
interval: new_interval,
} => {
interval = std::time::Duration::from_secs(new_interval);
continue;
}
AccessTokenError::ExpiredToken => {
break;
}
AccessTokenError::AccessDenied => {
anyhow::bail!("access denied, re-run the login command");
}
},
}
}
anyhow::bail!("code expired, please re-run the login command");
}
}

16
src/cli/auth/logout.rs Normal file
View file

@ -0,0 +1,16 @@
use crate::cli::set_token;
use clap::Args;
use pesde::Project;
/// Arguments for `pesde auth logout` (the command takes no options).
#[derive(Debug, Args)]
pub struct LogoutCommand {}
impl LogoutCommand {
    /// Deletes the stored registry token.
    pub fn run(self, project: Project) -> anyhow::Result<()> {
        // Passing `None` removes the stored credential.
        set_token(project.data_dir(), None)?;
        println!("logged out");
        Ok(())
    }
}

49
src/cli/auth/mod.rs Normal file
View file

@ -0,0 +1,49 @@
use anyhow::Context;
use clap::Subcommand;
use pesde::Project;
use serde::Deserialize;
mod login;
mod logout;
mod whoami;
/// Minimal projection of GitHub's `GET /user` response; only the login
/// (username) is needed.
#[derive(Debug, Deserialize)]
struct UserResponse {
    login: String,
}
/// Resolves an access token to the GitHub login (username) it belongs to,
/// by querying `https://api.github.com/user` with a bearer header.
pub fn get_token_login(
    reqwest: &reqwest::blocking::Client,
    access_token: &str,
) -> anyhow::Result<String> {
    // Build the authenticated request first, then send and decode it.
    let request = reqwest
        .get("https://api.github.com/user")
        .header("Authorization", format!("Bearer {access_token}"));
    let user: UserResponse = request
        .send()
        .context("failed to send user request")?
        .json()
        .context("failed to parse user response")?;
    Ok(user.login)
}
/// Subcommands of `pesde auth`.
#[derive(Debug, Subcommand)]
pub enum AuthCommands {
    /// Logs in into GitHub, and stores the token
    Login(login::LoginCommand),
    /// Removes the stored token
    Logout(logout::LogoutCommand),
    /// Prints the username of the currently logged-in user
    #[clap(name = "whoami")]
    WhoAmI(whoami::WhoAmICommand),
}
impl AuthCommands {
    /// Dispatches to the selected auth subcommand.
    pub fn run(self, project: Project) -> anyhow::Result<()> {
        match self {
            AuthCommands::Login(login) => login.run(project),
            AuthCommands::Logout(logout) => logout.run(project),
            AuthCommands::WhoAmI(whoami) => whoami.run(project),
        }
    }
}

26
src/cli/auth/whoami.rs Normal file
View file

@ -0,0 +1,26 @@
use crate::cli::{auth::get_token_login, get_token, reqwest_client};
use clap::Args;
use colored::Colorize;
use pesde::Project;
/// Arguments for `pesde auth whoami` (the command takes no options).
#[derive(Debug, Args)]
pub struct WhoAmICommand {}
impl WhoAmICommand {
    /// Prints the GitHub login of the stored token, or "not logged in"
    /// when no token is stored.
    pub fn run(self, project: Project) -> anyhow::Result<()> {
        let token = match get_token(project.data_dir())? {
            Some(token) => token,
            None => {
                println!("not logged in");
                return Ok(());
            }
        };
        println!(
            "logged in as {}",
            get_token_login(&reqwest_client(project.data_dir())?, &token)?.bold()
        );
        Ok(())
    }
}

View file

@ -1,42 +0,0 @@
use std::path::PathBuf;
use clap::Subcommand;
use crate::{cli::CLI_CONFIG, CliConfig};
#[derive(Subcommand, Clone)]
pub enum ConfigCommand {
/// Sets the cache directory
SetCacheDir {
/// The directory to use as the cache directory
#[clap(value_name = "DIRECTORY")]
directory: Option<PathBuf>,
},
/// Gets the cache directory
GetCacheDir,
}
pub fn config_command(cmd: ConfigCommand) -> anyhow::Result<()> {
match cmd {
ConfigCommand::SetCacheDir { directory } => {
let cli_config = CliConfig {
cache_dir: directory,
};
cli_config.write()?;
println!(
"cache directory set to: `{}`",
cli_config.cache_dir().display()
);
}
ConfigCommand::GetCacheDir => {
println!(
"current cache directory: `{}`",
CLI_CONFIG.cache_dir().display()
);
}
}
Ok(())
}

View file

@ -0,0 +1,39 @@
use crate::cli::{read_config, write_config, CliConfig};
use clap::Args;
use pesde::Project;
/// Arguments for `pesde config default-index`: set, reset, or print the
/// default registry index URL stored in the CLI config.
#[derive(Debug, Args)]
pub struct DefaultIndexCommand {
    /// The new index URL to set as default, don't pass any value to check the current default index
    #[arg(index = 1)]
    index: Option<url::Url>,
    /// Resets the default index to the default value
    #[arg(short, long, conflicts_with = "index")]
    reset: bool,
}
impl DefaultIndexCommand {
    /// Sets (or, with `--reset`, restores) the default index in the CLI
    /// config, or prints the current one when no value is given.
    pub fn run(self, project: Project) -> anyhow::Result<()> {
        let mut config = read_config(project.data_dir())?;
        // `--reset` conflicts with an explicit index (enforced by clap), so
        // at most one of these branches supplies a value.
        let new_index = if self.reset {
            Some(CliConfig::default().default_index)
        } else {
            self.index
        };
        if let Some(new_index) = new_index {
            config.default_index = new_index;
            write_config(project.data_dir(), &config)?;
            println!("default index set to: {}", config.default_index);
        } else {
            println!("current default index: {}", config.default_index);
        }
        Ok(())
    }
}

18
src/cli/config/mod.rs Normal file
View file

@ -0,0 +1,18 @@
use clap::Subcommand;
use pesde::Project;
mod default_index;
/// Subcommands of `pesde config`.
#[derive(Debug, Subcommand)]
pub enum ConfigCommands {
    /// Configuration for the default index
    DefaultIndex(default_index::DefaultIndexCommand),
}
impl ConfigCommands {
    /// Dispatches to the selected config subcommand.
    pub fn run(self, project: Project) -> anyhow::Result<()> {
        match self {
            ConfigCommands::DefaultIndex(default_index) => default_index.run(project),
        }
    }
}

View file

@ -1,295 +1,145 @@
use crate::cli::{api_token::API_TOKEN_SOURCE, auth::AuthCommand, config::ConfigCommand};
use auth_git2::GitAuthenticator;
use clap::{Parser, Subcommand};
use directories::ProjectDirs;
use indicatif::MultiProgress;
use indicatif_log_bridge::LogWrapper;
use log::error;
use once_cell::sync::Lazy;
use pesde::{
index::{GitIndex, Index},
manifest::{Manifest, Realm},
package_name::{PackageName, StandardPackageName},
project::DEFAULT_INDEX_NAME,
};
use pretty_env_logger::env_logger::Env;
use reqwest::{
blocking::{RequestBuilder, Response},
header::ACCEPT,
};
use semver::{Version, VersionReq};
use clap::Subcommand;
use anyhow::Context;
use keyring::Entry;
use pesde::Project;
use serde::{Deserialize, Serialize};
use std::{
fs::create_dir_all,
hash::{DefaultHasher, Hash, Hasher},
path::PathBuf,
str::FromStr,
};
use std::path::Path;
pub mod api_token;
pub mod auth;
pub mod config;
pub mod root;
mod auth;
mod config;
#[derive(Debug, Clone)]
pub struct VersionedPackageName<V: FromStr<Err = semver::Error>>(PackageName, V);
impl<V: FromStr<Err = semver::Error>> FromStr for VersionedPackageName<V> {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (name, version) = s.split_once('@').ok_or_else(|| {
anyhow::anyhow!("invalid package name: {s}; expected format: name@version")
})?;
Ok(VersionedPackageName(
name.to_string().parse()?,
version.parse()?,
))
}
}
#[derive(Subcommand, Clone)]
pub enum Command {
/// Initializes a manifest file
Init,
/// Adds a package to the manifest
Add {
/// The package to add
#[clap(value_name = "PACKAGE")]
package: VersionedPackageName<VersionReq>,
/// Whether the package is a peer dependency
#[clap(long, short)]
peer: bool,
/// The realm of the package
#[clap(long, short)]
realm: Option<Realm>,
},
/// Removes a package from the manifest
Remove {
/// The package to remove
#[clap(value_name = "PACKAGE")]
package: PackageName,
},
/// Lists outdated packages
Outdated,
/// Installs the dependencies of the project
Install {
/// Whether to use the lockfile for resolving dependencies
#[clap(long, short)]
locked: bool,
},
/// Runs the `bin` export of the specified package
Run {
/// The package to run
#[clap(value_name = "PACKAGE")]
package: Option<StandardPackageName>,
/// The arguments to pass to the package
#[clap(last = true)]
args: Vec<String>,
},
/// Searches for a package on the registry
Search {
/// The query to search for
#[clap(value_name = "QUERY")]
query: Option<String>,
},
/// Publishes the project to the registry
Publish,
/// Converts a `wally.toml` file to a `pesde.yaml` file
#[cfg(feature = "wally")]
Convert,
/// Begins a new patch
Patch {
/// The package to patch
#[clap(value_name = "PACKAGE")]
package: VersionedPackageName<Version>,
},
/// Commits (finishes) the patch
PatchCommit {
/// The package's changed directory
#[clap(value_name = "DIRECTORY")]
dir: PathBuf,
},
/// Auth-related commands
Auth {
#[clap(subcommand)]
command: AuthCommand,
},
/// Config-related commands
Config {
#[clap(subcommand)]
command: ConfigCommand,
},
}
#[derive(Parser, Clone)]
#[clap(version = env!("CARGO_PKG_VERSION"))]
pub struct Cli {
#[clap(subcommand)]
pub command: Command,
/// The directory to run the command in
#[arg(short, long, value_name = "DIRECTORY")]
pub directory: Option<PathBuf>,
}
#[derive(Serialize, Deserialize, Clone, Default)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CliConfig {
pub cache_dir: Option<PathBuf>,
pub default_index: url::Url,
pub token: Option<String>,
}
impl CliConfig {
pub fn cache_dir(&self) -> PathBuf {
self.cache_dir
.clone()
.unwrap_or_else(|| DIRS.cache_dir().to_path_buf())
}
pub fn open() -> anyhow::Result<Self> {
let cli_config_path = DIRS.config_dir().join("config.yaml");
if cli_config_path.exists() {
Ok(serde_yaml::from_slice(&std::fs::read(cli_config_path)?)?)
} else {
let config = CliConfig::default();
config.write()?;
Ok(config)
}
}
pub fn write(&self) -> anyhow::Result<()> {
let folder = DIRS.config_dir();
create_dir_all(folder)?;
serde_yaml::to_writer(
&mut std::fs::File::create(folder.join("config.yaml"))?,
&self,
)?;
Ok(())
}
}
pub fn send_request(request_builder: RequestBuilder) -> anyhow::Result<Response> {
let res = request_builder.send()?;
match res.error_for_status_ref() {
Ok(_) => Ok(res),
Err(e) => {
error!("request failed: {e}\nbody: {}", res.text()?);
Err(e.into())
impl Default for CliConfig {
fn default() -> Self {
Self {
default_index: "https://github.com/daimond113/pesde-index".parse().unwrap(),
token: None,
}
}
}
pub static CLI: Lazy<Cli> = Lazy::new(Cli::parse);
pub fn read_config(data_dir: &Path) -> anyhow::Result<CliConfig> {
let config_string = match std::fs::read_to_string(data_dir.join("config.yaml")) {
Ok(config_string) => config_string,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
return Ok(CliConfig::default());
}
Err(e) => return Err(e).context("failed to read config file"),
};
pub static DIRS: Lazy<ProjectDirs> = Lazy::new(|| {
ProjectDirs::from("com", env!("CARGO_PKG_NAME"), env!("CARGO_BIN_NAME"))
.expect("couldn't get home directory")
});
let config = serde_yaml::from_str(&config_string).context("failed to parse config file")?;
pub static CLI_CONFIG: Lazy<CliConfig> = Lazy::new(|| CliConfig::open().unwrap());
Ok(config)
}
pub static CWD: Lazy<PathBuf> = Lazy::new(|| {
CLI.directory
.clone()
.or(std::env::current_dir().ok())
.expect("couldn't get current directory")
});
pub fn write_config(data_dir: &Path, config: &CliConfig) -> anyhow::Result<()> {
let config_string = serde_yaml::to_string(config).context("failed to serialize config")?;
std::fs::write(data_dir.join("config.yaml"), config_string)
.context("failed to write config file")?;
pub static REQWEST_CLIENT: Lazy<reqwest::blocking::Client> = Lazy::new(|| {
let mut header_map = reqwest::header::HeaderMap::new();
header_map.insert(ACCEPT, "application/json".parse().unwrap());
header_map.insert("X-GitHub-Api-Version", "2022-11-28".parse().unwrap());
Ok(())
}
if let Ok(Some(token)) = API_TOKEN_SOURCE.get_api_token() {
header_map.insert(
pub fn get_token(data_dir: &Path) -> anyhow::Result<Option<String>> {
match std::env::var("PESDE_TOKEN") {
Ok(token) => return Ok(Some(token)),
Err(std::env::VarError::NotPresent) => {}
Err(e) => return Err(e.into()),
}
let config = read_config(data_dir)?;
if let Some(token) = config.token {
return Ok(Some(token));
}
match Entry::new("token", env!("CARGO_PKG_NAME")) {
Ok(entry) => match entry.get_password() {
Ok(token) => return Ok(Some(token)),
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
Err(e) => return Err(e.into()),
},
Err(keyring::Error::PlatformFailure(_)) => {}
Err(e) => return Err(e.into()),
}
Ok(None)
}
pub fn set_token(data_dir: &Path, token: Option<&str>) -> anyhow::Result<()> {
let entry = match Entry::new("token", env!("CARGO_PKG_NAME")) {
Ok(entry) => entry,
Err(e) => return Err(e.into()),
};
let result = if let Some(token) = token {
entry.set_password(token)
} else {
entry.delete_credential()
};
match result {
Ok(()) => return Ok(()),
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
Err(e) => return Err(e.into()),
}
let mut config = read_config(data_dir)?;
config.token = token.map(|s| s.to_string());
write_config(data_dir, &config)?;
Ok(())
}
pub fn reqwest_client(data_dir: &Path) -> anyhow::Result<reqwest::blocking::Client> {
let mut headers = reqwest::header::HeaderMap::new();
if let Some(token) = get_token(data_dir)? {
headers.insert(
reqwest::header::AUTHORIZATION,
format!("Bearer {token}").parse().unwrap(),
format!("Bearer {}", token)
.parse()
.context("failed to create auth header")?,
);
}
reqwest::blocking::Client::builder()
headers.insert(
reqwest::header::ACCEPT,
"application/json"
.parse()
.context("failed to create accept header")?,
);
Ok(reqwest::blocking::Client::builder()
.user_agent(concat!(
env!("CARGO_PKG_NAME"),
"/",
env!("CARGO_PKG_VERSION")
))
.default_headers(header_map)
.build()
.unwrap()
});
pub static MULTI: Lazy<MultiProgress> = Lazy::new(|| {
let logger = pretty_env_logger::formatted_builder()
.parse_env(Env::default().default_filter_or("info"))
.build();
let multi = MultiProgress::new();
LogWrapper::new(multi.clone(), logger).try_init().unwrap();
multi
});
pub const DEFAULT_INDEX_URL: &str = "https://github.com/daimond113/pesde-index";
#[cfg(feature = "wally")]
pub const DEFAULT_WALLY_INDEX_URL: &str = "https://github.com/UpliftGames/wally-index";
pub fn index_dir(url: &str) -> PathBuf {
let mut hasher = DefaultHasher::new();
url.hash(&mut hasher);
let hash = hasher.finish().to_string();
CLI_CONFIG
.cache_dir()
.join("indices")
.join(hash)
.join("index")
.default_headers(headers)
.build()?)
}
pub fn clone_index(url: &str) -> GitIndex {
let index = GitIndex::new(
index_dir(url),
&url.parse().unwrap(),
Some(Box::new(|| {
Box::new(|a, b, c| {
let git_authenticator = GitAuthenticator::new();
let config = git2::Config::open_default().unwrap();
let mut cred = git_authenticator.credentials(&config);
#[derive(Debug, Subcommand)]
pub enum SubCommand {
/// Authentication-related commands
#[command(subcommand)]
Auth(auth::AuthCommands),
cred(a, b, c)
})
})),
API_TOKEN_SOURCE.get_api_token().unwrap(),
);
index.refresh().unwrap();
index
/// Configuration-related commands
#[command(subcommand)]
Config(config::ConfigCommands),
}
pub static DEFAULT_INDEX_DATA: Lazy<(PathBuf, String)> = Lazy::new(|| {
let manifest = Manifest::from_path(CWD.to_path_buf())
.map(|m| m.indices.get(DEFAULT_INDEX_NAME).unwrap().clone());
let url = &manifest.unwrap_or(DEFAULT_INDEX_URL.to_string());
(index_dir(url), url.clone())
});
pub static DEFAULT_INDEX: Lazy<GitIndex> = Lazy::new(|| clone_index(&DEFAULT_INDEX_DATA.1));
impl SubCommand {
pub fn run(self, project: Project) -> anyhow::Result<()> {
match self {
SubCommand::Auth(auth) => auth.run(project),
SubCommand::Config(config) => config.run(project),
}
}
}

View file

@ -1,593 +0,0 @@
use cfg_if::cfg_if;
use chrono::Utc;
use std::{
collections::{BTreeMap, HashMap},
fs::{create_dir_all, read, remove_dir_all, write},
str::FromStr,
time::Duration,
};
use flate2::{write::GzEncoder, Compression};
use futures_executor::block_on;
use ignore::{overrides::OverrideBuilder, WalkBuilder};
use inquire::{validator::Validation, Select, Text};
use log::debug;
use lune::Runtime;
use once_cell::sync::Lazy;
use reqwest::{header::AUTHORIZATION, Url};
use semver::Version;
use serde_json::Value;
use tar::Builder as TarBuilder;
use pesde::{
dependencies::{registry::RegistryDependencySpecifier, DependencySpecifier, PackageRef},
index::Index,
manifest::{Manifest, PathStyle, Realm},
multithread::MultithreadedJob,
package_name::{PackageName, StandardPackageName},
patches::{create_patch, setup_patches_repo},
project::{InstallOptions, Project, DEFAULT_INDEX_NAME},
DEV_PACKAGES_FOLDER, IGNORED_FOLDERS, MANIFEST_FILE_NAME, PACKAGES_FOLDER, PATCHES_FOLDER,
SERVER_PACKAGES_FOLDER,
};
use crate::cli::{
clone_index, send_request, Command, CLI_CONFIG, CWD, DEFAULT_INDEX, DEFAULT_INDEX_URL, DIRS,
MULTI, REQWEST_CLIENT,
};
pub const MAX_ARCHIVE_SIZE: usize = 4 * 1024 * 1024;
fn multithreaded_bar<E: Send + Sync + Into<anyhow::Error> + 'static>(
job: MultithreadedJob<E>,
len: u64,
message: String,
) -> Result<(), anyhow::Error> {
let bar = MULTI.add(
indicatif::ProgressBar::new(len)
.with_style(
indicatif::ProgressStyle::default_bar()
.template("{msg} {bar:40.208/166} {pos}/{len} {percent}% {elapsed_precise}")?,
)
.with_message(message),
);
bar.enable_steady_tick(Duration::from_millis(100));
while let Ok(result) = job.progress().recv() {
result.map_err(Into::into)?;
bar.inc(1);
}
bar.finish_with_message("done");
Ok(())
}
macro_rules! none_if_empty {
($s:expr) => {
if $s.is_empty() {
None
} else {
Some($s)
}
};
}
pub fn root_command(cmd: Command) -> anyhow::Result<()> {
let mut project: Lazy<Project> = Lazy::new(|| {
let manifest = Manifest::from_path(CWD.to_path_buf()).unwrap();
let indices = manifest
.indices
.clone()
.into_iter()
.map(|(k, v)| (k, Box::new(clone_index(&v)) as Box<dyn Index>))
.collect::<HashMap<_, _>>();
Project::new(CWD.to_path_buf(), CLI_CONFIG.cache_dir(), indices, manifest).unwrap()
});
match cmd {
Command::Install { locked } => {
for packages_folder in &[PACKAGES_FOLDER, DEV_PACKAGES_FOLDER, SERVER_PACKAGES_FOLDER] {
if let Err(e) = remove_dir_all(CWD.join(packages_folder)) {
if e.kind() != std::io::ErrorKind::NotFound {
return Err(e.into());
} else {
debug!("no {packages_folder} folder found, skipping removal");
}
};
}
let manifest = project.manifest().clone();
let lockfile = manifest.dependency_graph(&mut project, locked)?;
let download_job = project.download(&lockfile)?;
multithreaded_bar(
download_job,
lockfile.children.values().map(|v| v.len() as u64).sum(),
"Downloading packages".to_string(),
)?;
cfg_if! {
if #[cfg(feature = "wally")] {
let sourcemap_generator = manifest.sourcemap_generator.clone();
}
}
#[allow(unused_variables)]
let convert_job = project.convert_manifests(&lockfile, move |path| {
cfg_if! {
if #[cfg(feature = "wally")] {
if let Some(sourcemap_generator) = &sourcemap_generator {
cfg_if! {
if #[cfg(target_os = "windows")] {
std::process::Command::new("powershell")
.args(["-C", &sourcemap_generator])
.current_dir(path)
.output()
.expect("failed to execute process");
} else {
std::process::Command::new("sh")
.args(["-c", &sourcemap_generator])
.current_dir(path)
.output()
.expect("failed to execute process");
}
}
}
}
}
});
cfg_if! {
if #[cfg(feature = "wally")] {
multithreaded_bar(
convert_job,
lockfile.children.values().flat_map(|v| v.values()).filter(|v| matches!(v.pkg_ref, PackageRef::Git(_) | PackageRef::Wally(_))).count() as u64,
"Converting manifests".to_string(),
)?;
} else {
convert_job?;
}
}
let project = Lazy::force_mut(&mut project);
project.install(
InstallOptions::new()
.locked(locked)
.auto_download(false)
.lockfile(lockfile),
)?;
}
Command::Run { package, args } => {
let bin_path = if let Some(package) = package {
let lockfile = project
.lockfile()?
.ok_or(anyhow::anyhow!("lockfile not found"))?;
let resolved_pkg = lockfile
.children
.get(&package.clone().into())
.and_then(|versions| {
versions
.values()
.find(|pkg_ref| lockfile.root_specifier(pkg_ref).is_some())
})
.ok_or(anyhow::anyhow!(
"package not found in lockfile (or isn't root)"
))?;
let pkg_path = resolved_pkg.directory(project.path()).1;
let manifest = Manifest::from_path(&pkg_path)?;
let Some(bin_path) = manifest.exports.bin else {
anyhow::bail!("no bin found in package");
};
bin_path.to_path(pkg_path)
} else {
let manifest = project.manifest();
let bin_path = manifest
.exports
.bin
.clone()
.ok_or(anyhow::anyhow!("no bin found in package"))?;
bin_path.to_path(project.path())
};
let mut runtime = Runtime::new().with_args(args);
block_on(runtime.run(
bin_path.with_extension("").display().to_string(),
&read(bin_path)?,
))?;
}
Command::Search { query } => {
let config = DEFAULT_INDEX.config()?;
let api_url = config.api();
let response = send_request(REQWEST_CLIENT.get(Url::parse_with_params(
&format!("{}/v0/search", api_url),
&query.map(|q| vec![("query", q)]).unwrap_or_default(),
)?))?
.json::<Value>()?;
for package in response.as_array().unwrap() {
println!(
"{}@{}{}",
package["name"].as_str().unwrap(),
package["version"].as_str().unwrap(),
package["description"]
.as_str()
.map(|d| if d.is_empty() {
d.to_string()
} else {
format!("\n{}\n", d)
})
.unwrap_or_default()
);
}
}
Command::Publish => {
if project.manifest().private {
anyhow::bail!("package is private, cannot publish");
}
let encoder = GzEncoder::new(vec![], Compression::default());
let mut archive = TarBuilder::new(encoder);
let cwd = &CWD.to_path_buf();
let mut walk_builder = WalkBuilder::new(cwd);
walk_builder.add_custom_ignore_filename(".pesdeignore");
let mut overrides = OverrideBuilder::new(cwd);
for packages_folder in IGNORED_FOLDERS {
overrides.add(&format!("!{}", packages_folder))?;
}
walk_builder.overrides(overrides.build()?);
for entry in walk_builder.build() {
let entry = entry?;
let path = entry.path();
let relative_path = path.strip_prefix(cwd)?;
let entry_type = entry
.file_type()
.ok_or(anyhow::anyhow!("failed to get file type"))?;
if relative_path.as_os_str().is_empty() {
continue;
}
if entry_type.is_file() {
archive.append_path_with_name(path, relative_path)?;
} else if entry_type.is_dir() {
archive.append_dir(relative_path, path)?;
}
}
let archive = archive.into_inner()?.finish()?;
if archive.len() > MAX_ARCHIVE_SIZE {
anyhow::bail!(
"archive is too big ({} bytes), max {MAX_ARCHIVE_SIZE}. aborting...",
archive.len()
);
}
let part = reqwest::blocking::multipart::Part::bytes(archive)
.file_name("tarball.tar.gz")
.mime_str("application/gzip")?;
let index = project.indices().get(DEFAULT_INDEX_NAME).unwrap();
let mut request = REQWEST_CLIENT
.post(format!("{}/v0/packages", index.config()?.api()))
.multipart(reqwest::blocking::multipart::Form::new().part("tarball", part));
if let Some(token) = index.registry_auth_token() {
request = request.header(AUTHORIZATION, format!("Bearer {token}"));
} else {
request = request.header(AUTHORIZATION, "");
}
println!("{}", send_request(request)?.text()?);
}
Command::Patch { package } => {
let lockfile = project
.lockfile()?
.ok_or(anyhow::anyhow!("lockfile not found"))?;
let resolved_pkg = lockfile
.children
.get(&package.0)
.and_then(|versions| versions.get(&package.1))
.ok_or(anyhow::anyhow!("package not found in lockfile"))?;
let dir = DIRS
.data_dir()
.join("patches")
.join(format!("{}@{}", package.0.escaped(), package.1))
.join(Utc::now().timestamp().to_string());
if dir.exists() {
anyhow::bail!(
"patch already exists. remove the directory {} to create a new patch",
dir.display()
);
}
create_dir_all(&dir)?;
let project = Lazy::force_mut(&mut project);
let url = resolved_pkg.pkg_ref.resolve_url(project)?;
let index = project.indices().get(DEFAULT_INDEX_NAME).unwrap();
resolved_pkg.pkg_ref.download(
&REQWEST_CLIENT,
index.registry_auth_token().map(|t| t.to_string()),
url.as_ref(),
index.credentials_fn().cloned(),
&dir,
)?;
match &resolved_pkg.pkg_ref {
PackageRef::Git(_) => {}
_ => {
setup_patches_repo(&dir)?;
}
}
println!("done! modify the files in {} and run `{} patch-commit <DIRECTORY>` to commit the changes", dir.display(), env!("CARGO_BIN_NAME"));
}
Command::PatchCommit { dir } => {
let name = dir
.parent()
.and_then(|p| p.file_name())
.and_then(|f| f.to_str())
.unwrap();
let patch_path = project.path().join(PATCHES_FOLDER);
create_dir_all(&patch_path)?;
let patch_path = patch_path.join(format!("{name}.patch"));
if patch_path.exists() {
anyhow::bail!(
"patch already exists. remove the file {} to create a new patch",
patch_path.display()
);
}
let patches = create_patch(&dir)?;
write(&patch_path, patches)?;
remove_dir_all(&dir)?;
println!(
"done! to apply the patch, run `{} install`",
env!("CARGO_BIN_NAME")
);
}
Command::Init => {
if CWD.join(MANIFEST_FILE_NAME).exists() {
anyhow::bail!("manifest already exists");
}
let default_name = CWD.file_name().and_then(|s| s.to_str());
let mut name =
Text::new("What is the name of the package?").with_validator(|name: &str| {
Ok(match StandardPackageName::from_str(name) {
Ok(_) => Validation::Valid,
Err(e) => Validation::Invalid(e.into()),
})
});
if let Some(name_str) = default_name {
name = name.with_initial_value(name_str);
}
let name = name.prompt()?;
let path_style =
Select::new("What style of paths do you want to use?", vec!["roblox"]).prompt()?;
let path_style = match path_style {
"roblox" => PathStyle::Roblox {
place: Default::default(),
},
_ => unreachable!(),
};
let description = Text::new("What is the description of the package?").prompt()?;
let license = Text::new("What is the license of the package?").prompt()?;
let authors = Text::new("Who are the authors of the package? (split using ;)")
.prompt()?
.split(';')
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect::<Vec<String>>();
let repository = Text::new("What is the repository of the package?").prompt()?;
let private = Select::new("Is this package private?", vec!["yes", "no"]).prompt()?;
let private = private == "yes";
let realm = Select::new(
"What is the realm of the package?",
vec!["shared", "server", "dev"],
)
.prompt()?;
let realm = match realm {
"shared" => Realm::Shared,
"server" => Realm::Server,
"dev" => Realm::Development,
_ => unreachable!(),
};
let manifest = Manifest {
name: name.parse()?,
version: Version::parse("0.1.0")?,
exports: Default::default(),
path_style,
private,
realm: Some(realm),
indices: BTreeMap::from([(
DEFAULT_INDEX_NAME.to_string(),
DEFAULT_INDEX_URL.to_string(),
)]),
#[cfg(feature = "wally")]
sourcemap_generator: None,
overrides: Default::default(),
dependencies: Default::default(),
peer_dependencies: Default::default(),
description: none_if_empty!(description),
license: none_if_empty!(license),
authors: none_if_empty!(authors),
repository: none_if_empty!(repository),
};
manifest.write(CWD.to_path_buf())?;
}
Command::Add {
package,
realm,
peer,
} => {
let mut manifest = project.manifest().clone();
let specifier = match package.0.clone() {
PackageName::Standard(name) => {
DependencySpecifier::Registry(RegistryDependencySpecifier {
name,
version: package.1,
realm,
index: DEFAULT_INDEX_NAME.to_string(),
})
}
#[cfg(feature = "wally")]
PackageName::Wally(name) => DependencySpecifier::Wally(
pesde::dependencies::wally::WallyDependencySpecifier {
name,
version: package.1,
realm,
index_url: crate::cli::DEFAULT_WALLY_INDEX_URL.parse().unwrap(),
},
),
};
fn insert_into(
deps: &mut BTreeMap<String, DependencySpecifier>,
specifier: DependencySpecifier,
name: PackageName,
) {
macro_rules! not_taken {
($key:expr) => {
(!deps.contains_key(&$key)).then_some($key)
};
}
let key = not_taken!(name.name().to_string())
.or_else(|| not_taken!(format!("{}/{}", name.scope(), name.name())))
.or_else(|| not_taken!(name.to_string()))
.unwrap();
deps.insert(key, specifier);
}
if peer {
insert_into(
&mut manifest.peer_dependencies,
specifier,
package.0.clone(),
);
} else {
insert_into(&mut manifest.dependencies, specifier, package.0.clone());
}
manifest.write(CWD.to_path_buf())?
}
Command::Remove { package } => {
let mut manifest = project.manifest().clone();
for dependencies in [&mut manifest.dependencies, &mut manifest.peer_dependencies] {
dependencies.retain(|_, d| {
if let DependencySpecifier::Registry(registry) = d {
match &package {
PackageName::Standard(name) => &registry.name != name,
#[cfg(feature = "wally")]
PackageName::Wally(_) => true,
}
} else {
cfg_if! {
if #[cfg(feature = "wally")] {
#[allow(clippy::collapsible_else_if)]
if let DependencySpecifier::Wally(wally) = d {
match &package {
PackageName::Standard(_) => true,
PackageName::Wally(name) => &wally.name != name,
}
} else {
true
}
} else {
true
}
}
}
});
}
manifest.write(project.path())?
}
Command::Outdated => {
let project = Lazy::force_mut(&mut project);
let manifest = project.manifest().clone();
let lockfile = manifest.dependency_graph(project, false)?;
for (name, versions) in &lockfile.children {
for (version, resolved_pkg) in versions {
if lockfile.root_specifier(resolved_pkg).is_none() {
continue;
}
if let PackageRef::Registry(registry) = &resolved_pkg.pkg_ref {
let latest_version = send_request(REQWEST_CLIENT.get(format!(
"{}/v0/packages/{}/{}/versions",
resolved_pkg.pkg_ref.get_index(project).config()?.api(),
registry.name.scope(),
registry.name.name()
)))?
.json::<Value>()?
.as_array()
.and_then(|a| a.last())
.and_then(|v| v.as_str())
.and_then(|s| s.parse::<Version>().ok())
.ok_or(anyhow::anyhow!(
"failed to get latest version of {name}@{version}"
))?;
if &latest_version > version {
println!(
"{name}@{version} is outdated. latest version: {latest_version}"
);
}
}
}
}
}
#[cfg(feature = "wally")]
Command::Convert => {
Manifest::from_path_or_convert(CWD.to_path_buf())?;
}
_ => unreachable!(),
}
Ok(())
}

View file

@ -1,237 +0,0 @@
use std::{
fs::create_dir_all,
hash::{DefaultHasher, Hash, Hasher},
path::Path,
sync::Arc,
};
use git2::{build::RepoBuilder, Repository};
use log::{debug, error, warn};
use semver::Version;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;
use crate::{
index::{remote_callbacks, CredentialsFn},
manifest::{update_sync_tool_files, Manifest, ManifestConvertError, Realm},
package_name::StandardPackageName,
project::{get_index, Indices},
};
/// A dependency of a package that can be downloaded from a git repository
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct GitDependencySpecifier {
    /// The URL of the git repository (can be in the form of `owner/repo`, in which case it will default to GitHub)
    pub repo: String,
    /// The revision of the git repository to use; resolved via revparse, so
    /// a branch, tag, or commit hash all work
    pub rev: String,
    /// The realm of the package
    #[serde(skip_serializing_if = "Option::is_none")]
    pub realm: Option<Realm>,
}
/// A reference to a package that can be downloaded from a git repository
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct GitPackageRef {
    /// The name of the package
    pub name: StandardPackageName,
    /// The version of the package
    pub version: Version,
    /// The URL of the git repository
    pub repo_url: Url,
    /// The revision of the git repository to use; unlike a specifier's `rev`,
    /// this is expected to be a resolved commit id (`download` warns if it
    /// revparses to a different object)
    pub rev: String,
}
/// An error that occurred while downloading a git repository
/// (returned by `GitDependencySpecifier::resolve` and `GitPackageRef::download`)
#[derive(Debug, Error)]
pub enum GitDownloadError {
    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while reading the manifest of the git repository
    #[error("error reading manifest")]
    ManifestRead(#[from] ManifestConvertError),
    /// An error that occurred because the URL is invalid
    #[error("invalid URL")]
    InvalidUrl(#[from] url::ParseError),
    /// An error that occurred while resolving a git dependency's manifest
    #[error("error resolving git dependency manifest")]
    Resolve(#[from] GitManifestResolveError),
}
/// An error that occurred while resolving a git dependency's manifest
/// (returned by the module-level `manifest` helper)
#[derive(Debug, Error)]
pub enum GitManifestResolveError {
    /// An error that occurred because the scope and name could not be extracted from the URL
    #[error("could not extract scope and name from URL: {0}")]
    ScopeAndNameFromUrl(Url),
    /// An error that occurred because the package name is invalid
    #[error("invalid package name")]
    InvalidPackageName(#[from] crate::package_name::StandardPackageNameValidationError),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
}
/// Converts `s` to snake_case: every uppercase letter is lowercased and,
/// unless it is the first character, preceded by an underscore; hyphens
/// become underscores; everything else passes through unchanged.
fn to_snake_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());

    for (i, c) in s.chars().enumerate() {
        if c.is_uppercase() {
            if i != 0 {
                out.push('_');
            }
            // `to_lowercase` yields an iterator because some characters
            // lowercase to more than one char.
            for lower in c.to_lowercase() {
                out.push(lower);
            }
        } else if c == '-' {
            out.push('_');
        } else {
            out.push(c);
        }
    }

    out
}
/// Reads the manifest at `path`, or — when none exists and conversion fails —
/// synthesizes a private placeholder manifest whose `scope/name` is derived
/// from the URL's first two path segments (snake_cased, trailing `.git`
/// stripped). The synthesized manifest is written back to `path` and the sync
/// tool files are updated to match.
pub(crate) fn manifest(path: &Path, url: &Url) -> Result<Manifest, GitManifestResolveError> {
    Manifest::from_path_or_convert(path).or_else(|_| {
        // No readable/convertible manifest — derive a package name from the URL.
        let (scope, name) = url
            .path_segments()
            .and_then(|mut s| {
                let scope = s.next();
                let name = s.next();

                if let (Some(scope), Some(name)) = (scope, name) {
                    Some((scope.to_string(), name.to_string()))
                } else {
                    None
                }
            })
            .ok_or_else(|| GitManifestResolveError::ScopeAndNameFromUrl(url.clone()))?;

        let manifest = Manifest {
            name: StandardPackageName::new(
                &to_snake_case(&scope),
                &to_snake_case(name.trim_end_matches(".git")),
            )?,
            version: Version::new(0, 1, 0),
            description: None,
            license: None,
            authors: None,
            repository: None,
            exports: Default::default(),
            path_style: Default::default(),
            // Placeholder manifests must never be publishable.
            private: true,
            realm: None,
            indices: Default::default(),
            #[cfg(feature = "wally")]
            sourcemap_generator: None,
            overrides: Default::default(),
            dependencies: Default::default(),
            peer_dependencies: Default::default(),
        };

        // NOTE(review): a write failure panics here instead of being
        // propagated — confirm that is intentional.
        manifest.write(path).unwrap();
        update_sync_tool_files(path, manifest.name.name().to_string())?;

        Ok(manifest)
    })
}
impl GitDependencySpecifier {
    /// Resolves the specifier to a concrete (manifest, repository URL, commit
    /// id) triple. The repository is cloned into — or reused from — a cache
    /// directory keyed by a hash of (URL, rev), then hard-reset to the
    /// requested revision.
    pub(crate) fn resolve(
        &self,
        cache_dir: &Path,
        indices: &Indices,
    ) -> Result<(Manifest, Url, String), GitDownloadError> {
        debug!("resolving git dependency {}", self.repo);

        // should also work with ssh urls
        let repo_url = if self.repo.contains(':') {
            debug!("resolved git repository name to: {}", self.repo);
            Url::parse(&self.repo)
        } else {
            // `owner/repo` shorthand defaults to GitHub.
            debug!("assuming git repository is a name: {}", self.repo);
            Url::parse(&format!("https://github.com/{}.git", &self.repo))
        }?;

        debug!("resolved git repository url to: {}", &repo_url);

        // Cache key: hash of (url, rev), so different revisions of the same
        // repository get distinct checkouts.
        let mut hasher = DefaultHasher::new();
        repo_url.hash(&mut hasher);
        self.rev.hash(&mut hasher);
        let repo_hash = hasher.finish();

        let dest = cache_dir.join("git").join(repo_hash.to_string());

        let repo = if !dest.exists() {
            create_dir_all(&dest)?;

            let mut fetch_options = git2::FetchOptions::new();
            fetch_options.remote_callbacks(remote_callbacks!(get_index(indices, None)));

            RepoBuilder::new()
                .fetch_options(fetch_options)
                .clone(repo_url.as_ref(), &dest)?
        } else {
            // Cached checkout exists — reuse it.
            Repository::open(&dest)?
        };

        // Resolve the user-supplied revision (branch/tag/commit) to an object
        // and force the working tree onto it.
        let obj = repo.revparse_single(&self.rev)?;
        debug!("resolved git revision {} to: {}", self.rev, obj.id());

        repo.reset(&obj, git2::ResetType::Hard, None)?;

        Ok((manifest(&dest, &repo_url)?, repo_url, obj.id().to_string()))
    }
}
impl GitPackageRef {
    /// Downloads the package to the specified destination
    ///
    /// Clones `repo_url` into `dest` (authenticating with `credentials_fn`
    /// when provided) and hard-resets the checkout to `rev`.
    pub fn download<P: AsRef<Path>>(
        &self,
        dest: P,
        credentials_fn: Option<Arc<CredentialsFn>>,
    ) -> Result<(), GitDownloadError> {
        let mut fetch_options = git2::FetchOptions::new();
        let mut remote_callbacks = git2::RemoteCallbacks::new();
        // The factory is invoked once per download to obtain a fresh
        // credentials callback.
        let credentials_fn = credentials_fn.map(|f| f());

        if let Some(credentials_fn) = credentials_fn {
            debug!("authenticating this git clone with credentials");
            remote_callbacks.credentials(credentials_fn);
        } else {
            debug!("no credentials provided for this git clone");
        }

        fetch_options.remote_callbacks(remote_callbacks);

        let repo = RepoBuilder::new()
            .fetch_options(fetch_options)
            .clone(self.repo_url.as_ref(), dest.as_ref())?;

        let obj = repo.revparse_single(&self.rev)?;

        // `rev` in a lockfile ref is expected to already be a full commit id,
        // so a mismatch here indicates something odd upstream.
        if self.rev != obj.id().to_string() {
            warn!(
                "git package ref {} resolved to a different revision: {}. this shouldn't happen",
                self.rev,
                obj.id()
            );
        }

        repo.reset(&obj, git2::ResetType::Hard, None)?;

        Ok(())
    }
}

View file

@ -1,443 +0,0 @@
use std::{
fmt::Display,
fs::create_dir_all,
path::{Path, PathBuf},
sync::Arc,
};
use cfg_if::cfg_if;
use log::debug;
use reqwest::header::AUTHORIZATION;
use semver::Version;
use serde::{de::IntoDeserializer, Deserialize, Deserializer, Serialize};
use serde_yaml::Value;
use thiserror::Error;
use url::Url;
use crate::{
dependencies::{
git::{GitDependencySpecifier, GitPackageRef},
registry::{RegistryDependencySpecifier, RegistryPackageRef},
resolution::RootLockfileNode,
},
index::{CredentialsFn, Index},
manifest::{ManifestWriteError, Realm},
multithread::MultithreadedJob,
package_name::PackageName,
project::{get_index, get_index_by_url, InstallProjectError, Project},
};
/// Git dependency related stuff
pub mod git;
/// Registry dependency related stuff
pub mod registry;
/// Resolution
pub mod resolution;
/// Wally dependency related stuff
#[cfg(feature = "wally")]
pub mod wally;
// To improve developer experience, we resolve the type of the dependency specifier with a custom deserializer, so that the user doesn't have to specify the type of the dependency
/// A dependency of a package
#[derive(Serialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(untagged)]
pub enum DependencySpecifier {
    /// A dependency that can be downloaded from a registry
    Registry(RegistryDependencySpecifier),
    /// A dependency that can be downloaded from a git repository
    Git(GitDependencySpecifier),
    /// A dependency that can be downloaded from a wally registry
    /// (only available with the `wally` feature)
    #[cfg(feature = "wally")]
    Wally(wally::WallyDependencySpecifier),
}
impl DependencySpecifier {
    /// Gets the name (or repository) of the specifier
    pub fn name(&self) -> String {
        match self {
            DependencySpecifier::Registry(registry) => registry.name.to_string(),
            // Git specifiers have no package name up front; the repo string
            // stands in for it.
            DependencySpecifier::Git(git) => git.repo.to_string(),
            #[cfg(feature = "wally")]
            DependencySpecifier::Wally(wally) => wally.name.to_string(),
        }
    }

    /// Gets the version (or revision) of the specifier
    pub fn version(&self) -> String {
        match self {
            DependencySpecifier::Registry(registry) => registry.version.to_string(),
            DependencySpecifier::Git(git) => git.rev.clone(),
            #[cfg(feature = "wally")]
            DependencySpecifier::Wally(wally) => wally.version.to_string(),
        }
    }

    /// Gets the realm of the specifier, if one was declared
    pub fn realm(&self) -> Option<&Realm> {
        match self {
            DependencySpecifier::Registry(registry) => registry.realm.as_ref(),
            DependencySpecifier::Git(git) => git.realm.as_ref(),
            #[cfg(feature = "wally")]
            DependencySpecifier::Wally(wally) => wally.realm.as_ref(),
        }
    }
}
impl<'de> Deserialize<'de> for DependencySpecifier {
    /// Custom deserializer: the variant is chosen by which discriminating key
    /// is present (`repo` -> Git, `name` -> Registry, `wally` -> Wally), so
    /// manifest authors never have to spell out the dependency type.
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        // Buffer into a YAML value first so the keys can be inspected before
        // committing to a variant.
        let yaml = Value::deserialize(deserializer)?;

        let result = if yaml.get("repo").is_some() {
            GitDependencySpecifier::deserialize(yaml.into_deserializer())
                .map(DependencySpecifier::Git)
        } else if yaml.get("name").is_some() {
            RegistryDependencySpecifier::deserialize(yaml.into_deserializer())
                .map(DependencySpecifier::Registry)
        } else if yaml.get("wally").is_some() {
            // NOTE(review): this branch keys on a `wally` field — verify the
            // wally specifier actually serializes such a field.
            cfg_if! {
                if #[cfg(feature = "wally")] {
                    wally::WallyDependencySpecifier::deserialize(yaml.into_deserializer())
                        .map(DependencySpecifier::Wally)
                } else {
                    Err(serde::de::Error::custom("wally is not enabled"))
                }
            }
        } else {
            Err(serde::de::Error::custom("invalid dependency"))
        };

        // Normalize: inner deserializers yield serde_yaml errors, the caller
        // expects D::Error.
        result.map_err(|e| serde::de::Error::custom(e.to_string()))
    }
}
// Here we don't use a custom deserializer, because this is exposed to the user only from the lock file, which mustn't be edited manually anyway
/// A reference to a package
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum PackageRef {
    /// A reference to a package that can be downloaded from a registry
    Registry(RegistryPackageRef),
    /// A reference to a package that can be downloaded from a git repository
    Git(GitPackageRef),
    /// A reference to a package that can be downloaded from a wally registry
    /// (only available with the `wally` feature)
    #[cfg(feature = "wally")]
    Wally(wally::WallyPackageRef),
}
/// An error that occurred while downloading a package
/// (each variant carries the offending `PackageRef` for context)
#[derive(Debug, Error)]
pub enum DownloadError {
    /// An error that occurred while downloading a package from a registry
    #[error("error downloading package {1} from registry")]
    Registry(#[source] registry::RegistryDownloadError, Box<PackageRef>),
    /// An error that occurred while downloading a package from a git repository
    #[error("error downloading package {1} from git repository")]
    Git(#[source] git::GitDownloadError, Box<PackageRef>),
    /// An error that occurred while downloading a package from a wally registry
    #[cfg(feature = "wally")]
    #[error("error downloading package {1} from wally registry")]
    Wally(#[source] wally::WallyDownloadError, Box<PackageRef>),
    /// A URL is required for this type of package reference
    #[error("a URL is required for this type of package reference")]
    UrlRequired,
}
/// An error that occurred while resolving a URL
/// (returned by `PackageRef::resolve_url`)
#[derive(Debug, Error)]
pub enum UrlResolveError {
    /// An error that occurred while resolving a URL of a registry package
    #[error("error resolving URL of registry package")]
    Registry(#[from] registry::RegistryUrlResolveError),
    /// An error that occurred while resolving a URL of a wally package
    #[cfg(feature = "wally")]
    #[error("error resolving URL of wally package")]
    Wally(#[from] wally::ResolveWallyUrlError),
}
impl PackageRef {
    /// Gets the name of the package
    pub fn name(&self) -> PackageName {
        match self {
            PackageRef::Registry(registry) => PackageName::Standard(registry.name.clone()),
            PackageRef::Git(git) => PackageName::Standard(git.name.clone()),
            #[cfg(feature = "wally")]
            PackageRef::Wally(wally) => PackageName::Wally(wally.name.clone()),
        }
    }

    /// Gets the version of the package
    pub fn version(&self) -> &Version {
        match self {
            PackageRef::Registry(registry) => &registry.version,
            PackageRef::Git(git) => &git.version,
            #[cfg(feature = "wally")]
            PackageRef::Wally(wally) => &wally.version,
        }
    }

    /// Returns the URL of the index, or `None` for git packages (which have
    /// no index)
    pub fn index_url(&self) -> Option<Url> {
        match self {
            PackageRef::Registry(registry) => Some(registry.index_url.clone()),
            PackageRef::Git(_) => None,
            #[cfg(feature = "wally")]
            PackageRef::Wally(wally) => Some(wally.index_url.clone()),
        }
    }

    /// Resolves the download URL of the package; `None` for git packages,
    /// which are cloned rather than fetched over HTTP
    pub fn resolve_url(&self, project: &mut Project) -> Result<Option<Url>, UrlResolveError> {
        Ok(match &self {
            PackageRef::Registry(registry) => Some(registry.resolve_url(project.indices())?),
            PackageRef::Git(_) => None,
            #[cfg(feature = "wally")]
            PackageRef::Wally(wally) => {
                // Copy the path out first so `indices_mut` below holds the
                // only active borrow of `project`.
                let cache_dir = project.cache_dir().to_path_buf();
                Some(wally.resolve_url(&cache_dir, project.indices_mut())?)
            }
        })
    }

    /// Gets the index of the package (the default index when the ref has no
    /// index URL, e.g. git packages)
    pub fn get_index<'a>(&self, project: &'a Project) -> &'a dyn Index {
        match &self.index_url() {
            Some(url) => get_index_by_url(project.indices(), url),
            None => get_index(project.indices(), None),
        }
    }

    /// Downloads the package to the specified destination
    ///
    /// `url` is required for registry/wally refs (errors with `UrlRequired`
    /// otherwise); git refs use `credentials_fn` instead.
    pub fn download<P: AsRef<Path>>(
        &self,
        reqwest_client: &reqwest::blocking::Client,
        registry_auth_token: Option<String>,
        url: Option<&Url>,
        credentials_fn: Option<Arc<CredentialsFn>>,
        dest: P,
    ) -> Result<(), DownloadError> {
        match self {
            PackageRef::Registry(registry) => registry
                .download(
                    reqwest_client,
                    url.ok_or(DownloadError::UrlRequired)?,
                    registry_auth_token,
                    dest,
                )
                .map_err(|e| DownloadError::Registry(e, Box::new(self.clone()))),
            PackageRef::Git(git) => git
                .download(dest, credentials_fn)
                .map_err(|e| DownloadError::Git(e, Box::new(self.clone()))),
            #[cfg(feature = "wally")]
            PackageRef::Wally(wally) => wally
                .download(
                    reqwest_client,
                    url.ok_or(DownloadError::UrlRequired)?,
                    registry_auth_token,
                    dest,
                )
                .map_err(|e| DownloadError::Wally(e, Box::new(self.clone()))),
        }
    }
}
/// An error that occurred while converting a manifest
/// (returned by `Project::convert_manifests`)
#[derive(Debug, Error)]
pub enum ConvertManifestsError {
    /// An error that occurred while converting the manifest
    #[error("error converting the manifest")]
    Manifest(#[from] crate::manifest::ManifestConvertError),
    /// An error that occurred while converting a git dependency's manifest
    #[error("error converting a git dependency's manifest")]
    Git(#[from] crate::dependencies::git::GitManifestResolveError),
    /// An error that occurred while reading the sourcemap
    #[error("error reading the sourcemap")]
    Sourcemap(#[from] std::io::Error),
    /// An error that occurred while parsing the sourcemap
    #[cfg(feature = "wally")]
    #[error("error parsing the sourcemap")]
    Parse(#[from] serde_json::Error),
    /// An error that occurred while writing the manifest
    #[error("error writing the manifest")]
    Write(#[from] ManifestWriteError),
    /// A manifest is not present in a dependency, and the wally feature is not enabled
    #[cfg(not(feature = "wally"))]
    #[error("wally feature is not enabled, but the manifest is not present in the dependency")]
    ManifestNotPresent,
}
impl Project {
    /// Downloads the project's dependencies
    ///
    /// Walks every resolved package in the lockfile, skips those already on
    /// disk, and queues the rest onto a multithreaded download job which is
    /// returned to the caller to await.
    pub fn download(
        &mut self,
        lockfile: &RootLockfileNode,
    ) -> Result<MultithreadedJob<DownloadError>, InstallProjectError> {
        let (job, tx) = MultithreadedJob::new();

        for (name, versions) in lockfile.children.clone() {
            for (version, resolved_package) in versions {
                let (_, source) = resolved_package.directory(self.path());

                // Already downloaded by a previous install — nothing to do.
                if source.exists() {
                    debug!("package {name}@{version} already downloaded, skipping...");
                    continue;
                }

                debug!(
                    "downloading package {name}@{version} to {}",
                    source.display()
                );

                create_dir_all(&source)?;

                // Everything the worker needs is cloned/resolved up front,
                // since the closure below must own its captures.
                let reqwest_client = self.reqwest_client.clone();
                let url = resolved_package.pkg_ref.resolve_url(self)?;
                let index = resolved_package.pkg_ref.get_index(self);
                let registry_auth_token = index.registry_auth_token().map(|t| t.to_string());
                let credentials_fn = index.credentials_fn().cloned();

                job.execute(&tx, move || {
                    resolved_package.pkg_ref.download(
                        &reqwest_client,
                        registry_auth_token,
                        url.as_ref(),
                        credentials_fn,
                        source,
                    )
                });
            }
        }

        Ok(job)
    }

    /// Converts the manifests of the project's dependencies
    ///
    /// For every wally/git dependency: reads (or synthesizes) its manifest,
    /// runs `generate_sourcemap` over its directory, and points
    /// `exports.lib` at the first `.lua`/`.luau` file the sourcemap lists
    /// (or the sentinel `"true"` when none is found).
    #[cfg(feature = "wally")]
    pub fn convert_manifests<F: Fn(PathBuf) + Send + Sync + 'static>(
        &self,
        lockfile: &RootLockfileNode,
        generate_sourcemap: F,
    ) -> MultithreadedJob<ConvertManifestsError> {
        // Minimal view of a Rojo-style sourcemap: only filePaths is read.
        #[derive(Deserialize)]
        #[serde(rename_all = "camelCase")]
        struct SourcemapNode {
            #[serde(default)]
            file_paths: Vec<relative_path::RelativePathBuf>,
        }

        let (job, tx) = MultithreadedJob::new();
        // Shared between worker threads.
        let generate_sourcemap = Arc::new(generate_sourcemap);

        for versions in lockfile.children.values() {
            for resolved_package in versions.values().cloned() {
                let generate_sourcemap = generate_sourcemap.clone();
                let self_path = self.path().to_path_buf();

                job.execute(&tx, move || {
                    // Only wally and git packages need conversion.
                    let source = match &resolved_package.pkg_ref {
                        PackageRef::Wally(_) | PackageRef::Git(_) => {
                            resolved_package.directory(self_path).1
                        }
                        _ => return Ok(()),
                    };

                    let mut manifest = match &resolved_package.pkg_ref {
                        PackageRef::Git(git) => {
                            crate::dependencies::git::manifest(&source, &git.repo_url)?
                        }
                        _ => crate::manifest::Manifest::from_path_or_convert(&source)?,
                    };

                    generate_sourcemap(source.to_path_buf());

                    let sourcemap = source.join("sourcemap.json");
                    let sourcemap: SourcemapNode = if sourcemap.exists() {
                        serde_json::from_str(&std::fs::read_to_string(&sourcemap)?)?
                    } else {
                        // Best-effort: a missing sourcemap skips the package
                        // rather than failing the whole job.
                        log::warn!("sourcemap for {resolved_package} not found, skipping...");
                        return Ok(());
                    };

                    manifest.exports.lib = sourcemap
                        .file_paths
                        .into_iter()
                        .find(|path| {
                            path.extension()
                                .is_some_and(|ext| ext == "lua" || ext == "luau")
                        })
                        .or_else(|| Some(relative_path::RelativePathBuf::from("true")));

                    manifest.write(&source)?;

                    Ok(())
                });
            }
        }

        job
    }

    /// Errors if dependencies don't have manifests, enable the `wally` feature to convert them
    #[cfg(not(feature = "wally"))]
    pub fn convert_manifests<F: Fn(PathBuf)>(
        &self,
        lockfile: &RootLockfileNode,
        _generate_sourcemap: F,
    ) -> Result<(), ConvertManifestsError> {
        for versions in lockfile.children.values() {
            for resolved_package in versions.values() {
                // Only git packages can lack a pesde manifest here.
                let source = match &resolved_package.pkg_ref {
                    PackageRef::Git(_) => resolved_package.directory(self.path()).1,
                    _ => continue,
                };

                if match &resolved_package.pkg_ref {
                    PackageRef::Git(git) => {
                        crate::dependencies::git::manifest(&source, &git.repo_url).is_err()
                    }
                    _ => crate::manifest::Manifest::from_path_or_convert(&source).is_err(),
                } {
                    return Err(ConvertManifestsError::ManifestNotPresent);
                }
            }
        }

        Ok(())
    }
}
impl Display for PackageRef {
    /// Formats the reference as `name@version`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = self.name();
        let version = self.version();
        write!(f, "{name}@{version}")
    }
}
/// Builds a GET request for `url`, attaching a `Bearer` authorization header
/// when a registry token is supplied. For logging, everything past the
/// token's first nine characters is masked.
pub(crate) fn maybe_authenticated_request(
    reqwest_client: &reqwest::blocking::Client,
    url: &str,
    registry_auth_token: Option<String>,
) -> reqwest::blocking::RequestBuilder {
    let builder = reqwest_client.get(url);
    debug!("sending request to {}", url);

    match registry_auth_token {
        None => builder,
        Some(token) => {
            let hidden_token: String = token
                .chars()
                .enumerate()
                .map(|(i, c)| if i <= 8 { c } else { '*' })
                .collect();
            debug!("with registry token {hidden_token}");

            builder.header(AUTHORIZATION, format!("Bearer {token}"))
        }
    }
}

View file

@ -1,148 +0,0 @@
use std::path::Path;
use log::{debug, error};
use semver::{Version, VersionReq};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;
use crate::{
dependencies::maybe_authenticated_request,
manifest::Realm,
package_name::StandardPackageName,
project::{get_index_by_url, Indices, DEFAULT_INDEX_NAME},
};
/// Serde default for `RegistryDependencySpecifier::index`: the name of the
/// default index.
fn default_index_name() -> String {
    String::from(DEFAULT_INDEX_NAME)
}
/// A dependency of a package that can be downloaded from a registry
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct RegistryDependencySpecifier {
    /// The name of the package
    pub name: StandardPackageName,
    /// The version requirement of the package
    pub version: VersionReq,
    /// The name of the index to use (defaults to the default index name when
    /// omitted in the manifest)
    #[serde(default = "default_index_name")]
    pub index: String,
    /// The realm of the package
    #[serde(skip_serializing_if = "Option::is_none")]
    pub realm: Option<Realm>,
}
/// A reference to a package that can be downloaded from a registry
/// (an exact, resolved version — unlike the specifier's version requirement)
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct RegistryPackageRef {
    /// The name of the package
    pub name: StandardPackageName,
    /// The version of the package
    pub version: Version,
    /// The index URL of the package
    pub index_url: Url,
}
/// An error that occurred while downloading a package from a registry
/// (returned by `RegistryPackageRef::download`)
#[derive(Debug, Error)]
pub enum RegistryDownloadError {
    /// An error that occurred while interacting with reqwest
    #[error("error interacting with reqwest")]
    Reqwest(#[from] reqwest::Error),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while reading the index config
    #[error("error with the index config")]
    IndexConfig(#[from] crate::index::ConfigError),
    /// The package was not found on the registry
    #[error("package {0} not found on the registry, but found in the index")]
    NotFound(StandardPackageName),
    /// The user is unauthorized to download the package
    #[error("unauthorized to download package {0}")]
    Unauthorized(StandardPackageName),
    /// An HTTP error occurred
    #[error("http error {0}: the server responded with {1}")]
    Http(reqwest::StatusCode, String),
    /// An error occurred while parsing the api URL
    #[error("error parsing the API URL")]
    UrlParse(#[from] url::ParseError),
}
/// An error that occurred while resolving the url of a registry package
/// (returned by `RegistryPackageRef::resolve_url`)
#[derive(Debug, Error)]
pub enum RegistryUrlResolveError {
    /// An error that occurred while reading the index config
    #[error("error with the index config")]
    IndexConfig(#[from] crate::index::ConfigError),
    /// An error occurred while parsing the api URL
    #[error("error parsing the API URL")]
    UrlParse(#[from] url::ParseError),
}
impl RegistryPackageRef {
    /// Resolves the download URL of the package by filling the
    /// `{PACKAGE_AUTHOR}`/`{PACKAGE_NAME}`/`{PACKAGE_VERSION}` placeholders
    /// in the index config's download template.
    pub fn resolve_url(&self, indices: &Indices) -> Result<Url, RegistryUrlResolveError> {
        let index = get_index_by_url(indices, &self.index_url);
        let config = index.config()?;

        let url = config
            .download()
            .replace("{PACKAGE_AUTHOR}", self.name.scope())
            .replace("{PACKAGE_NAME}", self.name.name())
            .replace("{PACKAGE_VERSION}", &self.version.to_string());

        Ok(Url::parse(&url)?)
    }

    /// Downloads the package to the specified destination
    ///
    /// Fetches a gzipped tarball from `url` (optionally authenticated) and
    /// unpacks it into `dest`; 404 and 401 map to the dedicated
    /// `NotFound`/`Unauthorized` variants.
    pub fn download<P: AsRef<Path>>(
        &self,
        reqwest_client: &reqwest::blocking::Client,
        url: &Url,
        registry_auth_token: Option<String>,
        dest: P,
    ) -> Result<(), RegistryDownloadError> {
        debug!(
            "downloading registry package {}@{} from {}",
            self.name, self.version, url
        );

        let response =
            maybe_authenticated_request(reqwest_client, url.as_str(), registry_auth_token)
                .send()?;

        if !response.status().is_success() {
            return match response.status() {
                reqwest::StatusCode::NOT_FOUND => {
                    Err(RegistryDownloadError::NotFound(self.name.clone()))
                }
                reqwest::StatusCode::UNAUTHORIZED => {
                    Err(RegistryDownloadError::Unauthorized(self.name.clone()))
                }
                _ => Err(RegistryDownloadError::Http(
                    response.status(),
                    response.text()?,
                )),
            };
        }

        // The payload is a gzip-compressed tar archive.
        let bytes = response.bytes()?;
        let mut decoder = flate2::read::GzDecoder::new(bytes.as_ref());
        let mut archive = tar::Archive::new(&mut decoder);
        archive.unpack(&dest)?;

        Ok(())
    }
}

View file

@ -1,596 +0,0 @@
use std::{
collections::{BTreeMap, HashMap, HashSet, VecDeque},
fmt::Display,
path::{Path, PathBuf},
};
use log::debug;
use semver::{Version, VersionReq};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{
dependencies::{
git::{GitDownloadError, GitPackageRef},
registry::RegistryPackageRef,
DependencySpecifier, PackageRef,
},
index::{Index, IndexFileEntry, IndexPackageError},
manifest::{DependencyType, Manifest, OverrideKey, Realm},
package_name::{PackageName, StandardPackageName},
project::{get_index, get_index_by_url, Indices, Project, ReadLockfileError},
DEV_PACKAGES_FOLDER, INDEX_FOLDER, PACKAGES_FOLDER, SERVER_PACKAGES_FOLDER,
};
/// A mapping of packages to something, grouped as name -> version -> `T`
/// (one entry per resolved version of each package)
pub type PackageMap<T> = BTreeMap<PackageName, BTreeMap<Version, T>>;
/// The root node of the dependency graph
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct RootLockfileNode {
    /// The name of the package
    pub name: StandardPackageName,
    /// Dependency overrides
    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
    pub overrides: BTreeMap<OverrideKey, DependencySpecifier>,
    /// The specifiers of the root packages; the `String` is the name the
    /// dependency was declared under in the manifest
    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
    pub specifiers: PackageMap<(DependencySpecifier, String)>,
    /// All nodes in the dependency graph
    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
    pub children: PackageMap<ResolvedPackage>,
}
impl RootLockfileNode {
    /// Looks up the root specifier recorded for `resolved_package`, keyed by
    /// its package name and exact version; `None` when the package is not a
    /// direct (root) dependency.
    pub fn root_specifier(
        &self,
        resolved_package: &ResolvedPackage,
    ) -> Option<&(DependencySpecifier, String)> {
        let versions = self.specifiers.get(&resolved_package.pkg_ref.name())?;
        versions.get(resolved_package.pkg_ref.version())
    }
}
/// A node in the dependency graph
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct ResolvedPackage {
    /// The reference to the package
    pub pkg_ref: PackageRef,
    /// The dependencies of the package; presumably the `String` is the name
    /// the dependency is exposed under — TODO confirm against resolution code
    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
    pub dependencies: BTreeMap<PackageName, (Version, String)>,
    /// The realm of the package
    pub realm: Realm,
    /// The type of the dependency
    #[serde(default, skip_serializing_if = "crate::is_default")]
    pub dep_type: DependencyType,
}
impl Display for ResolvedPackage {
    /// Delegates to the inner package reference (`name@version`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Display::fmt(&self.pkg_ref, f)
    }
}
/// Maps a realm to the name of the folder its packages are installed into.
///
/// The previous signature carried an unconstrained lifetime parameter
/// (`<'a>(…) -> &'a str`); the returned values are string-literal constants,
/// so `&'static str` is the honest type and coerces everywhere `&'a str` did.
pub(crate) fn packages_folder(realm: Realm) -> &'static str {
    match realm {
        Realm::Shared => PACKAGES_FOLDER,
        Realm::Server => SERVER_PACKAGES_FOLDER,
        Realm::Development => DEV_PACKAGES_FOLDER,
    }
}
impl ResolvedPackage {
    /// The name of the folder (by realm) this package installs into.
    pub(crate) fn packages_folder(&self) -> &str {
        packages_folder(self.realm)
    }

    /// Returns the directory of the package in the project, and the parent of the directory
    pub fn directory<P: AsRef<Path>>(&self, project_path: P) -> (PathBuf, PathBuf) {
        let escaped_name = self.pkg_ref.name().escaped();
        let version_str = self.pkg_ref.version().to_string();

        // <project>/<realm folder>/<index folder>/<escaped name>/<version>
        let mut container_path = project_path.as_ref().to_path_buf();
        container_path.push(self.packages_folder());
        container_path.push(INDEX_FOLDER);
        container_path.push(&escaped_name);
        container_path.push(version_str);

        // The package source lives one level deeper, in a folder named after
        // the package itself.
        let source_path = container_path.join(&escaped_name);
        (container_path, source_path)
    }
}
/// Picks the highest version out of `$iter` that matches the `$version`
/// requirement (a `VersionReq`), cloning the winner; expands to an
/// `Option<Version>`.
macro_rules! find_highest {
    ($iter:expr, $version:expr) => {
        $iter
            .filter(|v| $version.matches(v))
            .max_by(|a, b| a.cmp(&b))
            .cloned()
    };
}
/// Finds the index entry to use for `name` under `version_req`: prefers the
/// highest already-resolved version satisfying the requirement (so an
/// existing download is reused), otherwise the highest satisfying version
/// published in the index.
fn find_version_from_index(
    root: &mut RootLockfileNode,
    index: &dyn Index,
    specifier: &DependencySpecifier,
    name: PackageName,
    version_req: &VersionReq,
) -> Result<IndexFileEntry, ResolveError> {
    let index_entries = index
        .package(&name)
        .map_err(|e| ResolveError::IndexPackage(e, name.to_string()))?
        .ok_or_else(|| ResolveError::PackageNotFound(name.to_string()))?;

    // NOTE: inserts an (initially empty) entry for `name` even if resolution
    // fails below.
    let resolved_versions = root.children.entry(name).or_default();

    // try to find the highest already downloaded version that satisfies the requirement, otherwise find the highest satisfying version in the index
    let Some(version) = find_highest!(resolved_versions.keys(), version_req)
        .or_else(|| find_highest!(index_entries.iter().map(|v| &v.version), version_req))
    else {
        return Err(ResolveError::NoSatisfyingVersion(Box::new(
            specifier.clone(),
        )));
    };

    // NOTE(review): assumes any already-resolved version also appears in the
    // index — the unwrap panics otherwise; confirm that invariant holds.
    Ok(index_entries
        .into_iter()
        .find(|e| e.version.eq(&version))
        .unwrap())
}
/// Merges two realms: equal realms keep their value, differing realms fall
/// back to `Realm::Shared`.
fn find_realm(a: &Realm, b: &Realm) -> Realm {
    if a == b {
        *a
    } else {
        Realm::Shared
    }
}
/// An error that occurred while resolving dependencies
/// (produced while building the dependency graph)
#[derive(Debug, Error)]
pub enum ResolveError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred because a registry dependency conflicts with a git dependency
    #[error("registry dependency {0}@{1} conflicts with git dependency")]
    RegistryConflict(String, Version),
    /// An error that occurred because a git dependency conflicts with a registry dependency
    #[error("git dependency {0}@{1} conflicts with registry dependency")]
    GitConflict(String, Version),
    /// An error that occurred because no satisfying version was found for a dependency
    #[error("no satisfying version found for dependency {0:?}")]
    NoSatisfyingVersion(Box<DependencySpecifier>),
    /// An error that occurred while downloading a package from a git repository
    #[error("error downloading git package")]
    GitDownload(#[from] GitDownloadError),
    /// An error that occurred because a package was not found in the index
    #[error("package {0} not found in index")]
    PackageNotFound(String),
    /// An error that occurred while getting a package from the index
    #[error("failed to get package {1} from index")]
    IndexPackage(#[source] IndexPackageError, String),
    /// An error that occurred while reading the lockfile
    #[error("failed to read lockfile")]
    LockfileRead(#[from] ReadLockfileError),
    /// An error that occurred because the lockfile is out of date
    #[error("out of date lockfile")]
    OutOfDateLockfile,
    /// An error that occurred because two realms are incompatible
    #[error("incompatible realms for package {0} (package specified {1}, user specified {2})")]
    IncompatibleRealms(String, Realm, Realm),
    /// An error that occurred because a peer dependency is not installed
    #[error("peer dependency {0}@{1} is not installed")]
    PeerNotInstalled(String, Version),
    /// An error that occurred while cloning a wally index
    #[cfg(feature = "wally")]
    #[error("error cloning wally index")]
    CloneWallyIndex(#[from] crate::dependencies::wally::CloneWallyIndexError),
    /// An error that occurred while parsing a URL
    #[error("error parsing URL")]
    UrlParse(#[from] url::ParseError),
}
/// Looks up an index either by URL (when `maybe_url` parses as one) or by
/// name otherwise.
fn get_by_maybe_url<'a>(indices: &'a Indices, maybe_url: &'a str) -> &'a dyn Index {
    match maybe_url.parse() {
        Ok(url) => get_index_by_url(indices, &url),
        Err(_) => get_index(indices, Some(maybe_url)),
    }
}
impl Manifest {
    /// Computes the set of dependencies that still need resolving, reusing the
    /// previous lockfile where possible.
    ///
    /// When a lockfile exists, every root dependency whose specifier is
    /// unchanged — together with its whole transitive subtree — is copied into
    /// `root`, and only new or modified dependencies are returned. With
    /// `locked` set, any divergence from the lockfile is an error instead.
    fn missing_dependencies(
        &self,
        root: &mut RootLockfileNode,
        locked: bool,
        project: &Project,
    ) -> Result<BTreeMap<String, (DependencySpecifier, DependencyType)>, ResolveError> {
        Ok(if let Some(old_root) = project.lockfile()? {
            if self.name != old_root.name && locked {
                return Err(ResolveError::OutOfDateLockfile);
            }

            if self.overrides != old_root.overrides {
                // TODO: resolve only the changed dependencies (will this be worth it?)
                debug!("overrides have changed, resolving all dependencies");
                return Ok(self.dependencies());
            }

            debug!("lockfile found, resolving dependencies from it");

            let mut missing = BTreeMap::new();

            let current_dependencies = self.dependencies();
            // specifier -> desired name, used below to detect root dependencies
            // that are unchanged since the lockfile was written
            let current_specifiers = current_dependencies
                .clone()
                .into_iter()
                .map(|(desired_name, (specifier, _))| (specifier, desired_name))
                .collect::<HashMap<_, _>>();

            // populate the new lockfile with all root dependencies (and their dependencies) from the old lockfile
            for (name, versions) in &old_root.children {
                for (version, resolved_package) in versions {
                    // skip packages which are not root dependencies, or whose
                    // root specifier changed in the manifest
                    let Some((old_specifier, desired_name)) = old_root
                        .root_specifier(resolved_package)
                        .and_then(|(old_specifier, _)| {
                            current_specifiers
                                .get(old_specifier)
                                .map(|desired_name| (old_specifier, desired_name))
                        })
                    else {
                        continue;
                    };

                    root.specifiers.entry(name.clone()).or_default().insert(
                        version.clone(),
                        (old_specifier.clone(), desired_name.clone()),
                    );

                    // BFS over the package's dependency subtree, copying every
                    // node from the old lockfile into the new one
                    let mut queue = VecDeque::from([(resolved_package, 0usize)]);

                    while let Some((resolved_package, depth)) = queue.pop_front() {
                        debug!(
                            "{}resolved {resolved_package} from lockfile",
                            "\t".repeat(depth)
                        );

                        root.children
                            .entry(resolved_package.pkg_ref.name())
                            .or_default()
                            .insert(
                                resolved_package.pkg_ref.version().clone(),
                                resolved_package.clone(),
                            );

                        for (dep_name, (dep_version, _)) in &resolved_package.dependencies {
                            // already copied into the new lockfile
                            if root
                                .children
                                .get(dep_name)
                                .and_then(|v| v.get(dep_version))
                                .is_some()
                            {
                                continue;
                            }

                            // a dependency referenced by a locked package must
                            // itself be present in the old lockfile; if not, the
                            // lockfile is stale
                            let Some(dep) = old_root
                                .children
                                .get(dep_name)
                                .and_then(|v| v.get(dep_version))
                            else {
                                return Err(ResolveError::OutOfDateLockfile);
                            };

                            queue.push_back((dep, depth + 1));
                        }
                    }
                }
            }

            let old_specifiers = old_root
                .specifiers
                .values()
                .flat_map(|v| v.values())
                .map(|(specifier, _)| specifier)
                .collect::<HashSet<_>>();

            // resolve new, or modified, dependencies from the manifest
            for (desired_name, (specifier, dep_type)) in current_dependencies {
                if old_specifiers.contains(&specifier) {
                    continue;
                }

                if locked {
                    return Err(ResolveError::OutOfDateLockfile);
                }

                missing.insert(desired_name, (specifier.clone(), dep_type));
            }

            debug!(
                "resolved {} dependencies from lockfile. new dependencies: {}",
                old_root.children.len(),
                missing.len()
            );

            missing
        } else {
            debug!("no lockfile found, resolving all dependencies");
            self.dependencies()
        })
    }

    /// Resolves the dependency graph for the project
    pub fn dependency_graph(
        &self,
        project: &mut Project,
        locked: bool,
    ) -> Result<RootLockfileNode, ResolveError> {
        debug!("resolving dependency graph for project {}", self.name);
        // try to reuse versions (according to semver specifiers) to decrease the amount of downloads and storage
        let mut root = RootLockfileNode {
            name: self.name.clone(),
            overrides: self.overrides.clone(),
            specifiers: Default::default(),
            children: Default::default(),
        };

        let missing_dependencies = self.missing_dependencies(&mut root, locked, project)?;

        if missing_dependencies.is_empty() {
            debug!("no dependencies left to resolve, finishing...");
            return Ok(root);
        }

        // flatten the override map into one entry per (path, specifier) pair
        let overrides = self
            .overrides
            .iter()
            .flat_map(|(k, spec)| k.0.iter().map(|path| (path, spec.clone())))
            .collect::<HashMap<_, _>>();

        debug!("resolving {} dependencies", missing_dependencies.len());

        // BFS queue of (desired name, specifier, type, dependant, path from root)
        let mut queue = missing_dependencies
            .into_iter()
            .map(|(desired_name, (specifier, dep_type))| {
                (desired_name, specifier, dep_type, None, vec![])
            })
            .collect::<VecDeque<_>>();

        while let Some((desired_name, specifier, dep_type, dependant, mut path)) = queue.pop_front()
        {
            let depth = path.len();

            // resolve the specifier into a concrete package reference, plus the
            // resolved package's own realm and dependency list
            let (pkg_ref, default_realm, dependencies) = match &specifier {
                DependencySpecifier::Registry(registry_dependency) => {
                    // needed because of overrides, which are expected to use the project's indices rather than URLs
                    let index = get_by_maybe_url(project.indices(), &registry_dependency.index);

                    let entry = find_version_from_index(
                        &mut root,
                        index,
                        &specifier,
                        registry_dependency.name.clone().into(),
                        &registry_dependency.version,
                    )?;

                    debug!(
                        "{}resolved registry dependency {} to {}",
                        "\t".repeat(depth),
                        registry_dependency.name,
                        entry.version
                    );

                    (
                        PackageRef::Registry(RegistryPackageRef {
                            name: registry_dependency.name.clone(),
                            version: entry.version,
                            index_url: index.url().clone(),
                        }),
                        entry.realm,
                        entry.dependencies,
                    )
                }
                DependencySpecifier::Git(git_dependency) => {
                    let (manifest, url, rev) =
                        git_dependency.resolve(project.cache_dir(), project.indices())?;

                    debug!(
                        "{}resolved git dependency {} to {url}#{rev}",
                        "\t".repeat(depth),
                        git_dependency.repo
                    );

                    (
                        PackageRef::Git(GitPackageRef {
                            name: manifest.name.clone(),
                            version: manifest.version.clone(),
                            repo_url: url,
                            rev,
                        }),
                        manifest.realm,
                        manifest.dependencies(),
                    )
                }
                #[cfg(feature = "wally")]
                DependencySpecifier::Wally(wally_dependency) => {
                    let cache_dir = project.cache_dir().to_path_buf();
                    let index = crate::dependencies::wally::clone_wally_index(
                        &cache_dir,
                        project.indices_mut(),
                        &wally_dependency.index_url,
                    )?;

                    let entry = find_version_from_index(
                        &mut root,
                        &index,
                        &specifier,
                        wally_dependency.name.clone().into(),
                        &wally_dependency.version,
                    )?;

                    debug!(
                        "{}resolved wally dependency {} to {}",
                        "\t".repeat(depth),
                        wally_dependency.name,
                        entry.version
                    );

                    (
                        PackageRef::Wally(crate::dependencies::wally::WallyPackageRef {
                            name: wally_dependency.name.clone(),
                            version: entry.version,
                            index_url: index.url().clone(),
                        }),
                        entry.realm,
                        entry.dependencies,
                    )
                }
            };

            // if the dependency is a root dependency, it can be thought of as a normal dependency
            let dep_type = if dependant.is_some() {
                dep_type
            } else {
                DependencyType::Normal
            };

            let specifier_realm = specifier.realm().copied();

            // record the edge: under the dependant package if there is one,
            // otherwise as a root specifier
            if let Some((dependant_name, dependant_version)) = dependant {
                root.children
                    .get_mut(&dependant_name)
                    .and_then(|v| v.get_mut(&dependant_version))
                    .unwrap()
                    .dependencies
                    .insert(
                        pkg_ref.name(),
                        (pkg_ref.version().clone(), desired_name.clone()),
                    );
            } else {
                root.specifiers
                    .entry(pkg_ref.name())
                    .or_default()
                    .insert(pkg_ref.version().clone(), (specifier, desired_name.clone()));
            }

            let resolved_versions = root.children.entry(pkg_ref.name()).or_default();

            if let Some(previously_resolved) = resolved_versions.get_mut(pkg_ref.version()) {
                // the same name@version resolved from two different source kinds
                // is a conflict
                match (&pkg_ref, &previously_resolved.pkg_ref) {
                    (PackageRef::Registry(r), PackageRef::Git(_g)) => {
                        return Err(ResolveError::RegistryConflict(
                            r.name.to_string(),
                            r.version.clone(),
                        ));
                    }
                    (PackageRef::Git(g), PackageRef::Registry(_r)) => {
                        return Err(ResolveError::GitConflict(
                            g.name.to_string(),
                            g.version.clone(),
                        ));
                    }
                    _ => (),
                }

                // a normal dependency on an already-resolved peer dependency
                // "upgrades" it to normal (satisfying the peer check below)
                if previously_resolved.dep_type == DependencyType::Peer
                    && dep_type == DependencyType::Normal
                {
                    previously_resolved.dep_type = dep_type;
                }

                // need not resolve the package again
                continue;
            }

            if specifier_realm.is_some_and(|realm| realm == Realm::Shared)
                && default_realm.is_some_and(|realm| realm == Realm::Server)
            {
                return Err(ResolveError::IncompatibleRealms(
                    pkg_ref.name().to_string(),
                    default_realm.unwrap(),
                    specifier_realm.unwrap(),
                ));
            }

            resolved_versions.insert(
                pkg_ref.version().clone(),
                ResolvedPackage {
                    pkg_ref: pkg_ref.clone(),
                    dependencies: Default::default(),
                    realm: specifier_realm
                        .unwrap_or_default()
                        .or(default_realm.unwrap_or_default()),
                    dep_type,
                },
            );

            path.push(desired_name);

            // enqueue this package's own dependencies, applying any override
            // whose path matches this position in the tree
            for (desired_name, (specifier, ty)) in dependencies {
                let overridden = overrides.iter().find_map(|(k_path, spec)| {
                    (path == k_path[..k_path.len() - 1] && k_path.last() == Some(&desired_name))
                        .then_some(spec)
                });

                queue.push_back((
                    desired_name,
                    overridden.cloned().unwrap_or(specifier),
                    ty,
                    Some((pkg_ref.name(), pkg_ref.version().clone())),
                    path.clone(),
                ));
            }
        }

        debug!("resolving realms and peer dependencies...");

        for (name, versions) in root.children.clone() {
            for (version, resolved_package) in versions {
                // a peer dependency that was never "upgraded" by a normal
                // dependency on the same package is unsatisfied
                if resolved_package.dep_type == DependencyType::Peer {
                    return Err(ResolveError::PeerNotInstalled(
                        resolved_package.pkg_ref.name().to_string(),
                        resolved_package.pkg_ref.version().clone(),
                    ));
                }

                let mut realm = resolved_package.realm;

                // merge in the realms of resolved dependencies via `find_realm`
                for (dep_name, (dep_version, _)) in &resolved_package.dependencies {
                    let dep = root.children.get(dep_name).and_then(|v| v.get(dep_version));

                    if let Some(dep) = dep {
                        realm = find_realm(&realm, &dep.realm);
                    }
                }

                root.children
                    .get_mut(&name)
                    .and_then(|v| v.get_mut(&version))
                    .unwrap()
                    .realm = realm;
            }
        }

        debug!("finished resolving dependency graph");

        Ok(root)
    }
}

View file

@ -1,365 +0,0 @@
use std::{
collections::BTreeMap,
fs::{create_dir_all, read},
hash::{DefaultHasher, Hash, Hasher},
io::Cursor,
path::Path,
};
use git2::build::RepoBuilder;
use log::{debug, error};
use semver::{Version, VersionReq};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;
use crate::{
dependencies::{maybe_authenticated_request, DependencySpecifier},
index::{remote_callbacks, IndexFileEntry, WallyIndex},
manifest::{DependencyType, ManifestConvertError, Realm},
package_name::{
FromStrPackageNameParseError, WallyPackageName, WallyPackageNameValidationError,
},
project::{get_wally_index, Indices},
};
/// A dependency of a package that can be downloaded from a registry
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct WallyDependencySpecifier {
    /// The name of the package
    // serialized under the `wally` key rather than `name`
    #[serde(rename = "wally")]
    pub name: WallyPackageName,
    /// The version requirement of the package
    pub version: VersionReq,
    /// The url of the index
    pub index_url: Url,
    /// The realm of the package
    #[serde(skip_serializing_if = "Option::is_none")]
    pub realm: Option<Realm>,
}
/// A reference to a package that can be downloaded from a registry
// unlike `WallyDependencySpecifier`, this pins an exact resolved version
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(deny_unknown_fields)]
pub struct WallyPackageRef {
    /// The name of the package
    pub name: WallyPackageName,
    /// The version of the package
    pub version: Version,
    /// The index URL of the package
    pub index_url: Url,
}
/// An error that occurred while downloading a package from a wally registry
#[derive(Debug, Error)]
pub enum WallyDownloadError {
    /// An error that occurred while interacting with reqwest
    #[error("error interacting with reqwest")]
    Reqwest(#[from] reqwest::Error),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// The package was not found on the registry
    // the index listed the package, but the registry responded 404
    #[error("package {0} not found on the registry, but found in the index")]
    NotFound(WallyPackageName),
    /// The user is unauthorized to download the package
    #[error("unauthorized to download package {0}")]
    Unauthorized(WallyPackageName),
    /// An HTTP error occurred
    // carries the status code and the response body text
    #[error("http error {0}: the server responded with {1}")]
    Http(reqwest::StatusCode, String),
    /// An error occurred while extracting the archive
    #[error("error extracting archive")]
    Zip(#[from] zip::result::ZipError),
    /// An error occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),
    /// An error occurred while interacting with serde
    #[error("error interacting with serde")]
    Serde(#[from] serde_json::Error),
    /// An error occurred while parsing the api URL
    #[error("error parsing URL")]
    Url(#[from] url::ParseError),
    /// An error occurred while refreshing the index
    #[error("error refreshing index")]
    RefreshIndex(#[from] crate::index::RefreshError),
    /// An error occurred while converting the manifest
    #[error("error converting manifest")]
    Manifest(#[from] ManifestConvertError),
}
/// An error that occurred while cloning a wally index
// returned by `clone_wally_index` below
#[derive(Error, Debug)]
pub enum CloneWallyIndexError {
    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while refreshing the index
    #[error("error refreshing index")]
    RefreshIndex(#[from] crate::index::RefreshError),
}
/// Ensures a local clone of the wally index at `index_url` exists in the
/// cache, cloning it on first use, and returns the corresponding index handle.
pub(crate) fn clone_wally_index(
    cache_dir: &Path,
    indices: &mut Indices,
    index_url: &Url,
) -> Result<WallyIndex, CloneWallyIndexError> {
    // the cache directory name is derived from a hash of the index URL
    let mut state = DefaultHasher::new();
    index_url.hash(&mut state);
    let dir_name = state.finish().to_string();

    let target = cache_dir.join("wally_indices").join(dir_name);

    if !target.exists() {
        debug!(
            "cloning wally index from {} to {}",
            index_url,
            target.display()
        );

        create_dir_all(&target)?;

        let mut fetch_options = git2::FetchOptions::new();
        fetch_options.remote_callbacks(remote_callbacks!(get_wally_index(
            indices,
            index_url,
            Some(&target)
        )?));

        RepoBuilder::new()
            .fetch_options(fetch_options)
            .clone(index_url.as_ref(), &target)?;

        return Ok(get_wally_index(indices, index_url, Some(&target))?.clone());
    }

    debug!("wally index already exists at {}", target.display());

    Ok(get_wally_index(indices, index_url, Some(&target))?.clone())
}
/// The configuration of a wally index
#[derive(Serialize, Deserialize, Debug)]
struct WallyIndexConfig {
    /// The URL of the wally API
    // kept as a raw string here; callers parse it into a `Url`
    api: String,
}
/// An error that occurred while resolving the URL of a wally package
// returned by `WallyPackageRef::resolve_url`
#[derive(Error, Debug)]
pub enum ResolveWallyUrlError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while interacting with the index
    #[error("error interacting with the index")]
    Index(#[from] crate::index::ConfigError),
    /// An error that occurred while parsing the URL
    #[error("error parsing URL")]
    Url(#[from] url::ParseError),
    /// An error that occurred while cloning the index
    #[error("error cloning index")]
    CloneIndex(#[from] CloneWallyIndexError),
    /// An error that occurred while reading the index config
    #[error("error reading index config")]
    ReadConfig(#[from] serde_json::Error),
}
/// Reads the wally API URL out of the index clone's `config.json`.
fn read_api_url(index_path: &Path) -> Result<String, ResolveWallyUrlError> {
    let raw = read(index_path.join("config.json"))?;
    let parsed: WallyIndexConfig = serde_json::from_slice(&raw)?;
    Ok(parsed.api)
}
impl WallyPackageRef {
    /// Resolves the download URL of the package
    pub fn resolve_url(
        &self,
        cache_dir: &Path,
        indices: &mut Indices,
    ) -> Result<Url, ResolveWallyUrlError> {
        let index = clone_wally_index(cache_dir, indices, &self.index_url)?;
        let api_url = Url::parse(&read_api_url(&index.path)?)?;

        // contents endpoint: {api}/v1/package-contents/{scope}/{name}/{version}
        let api_base = api_url.to_string();
        let contents_url = format!(
            "{}/v1/package-contents/{}/{}/{}",
            api_base.trim_end_matches('/'),
            self.name.scope(),
            self.name.name(),
            self.version
        );

        Ok(Url::parse(&contents_url)?)
    }

    /// Downloads the package to the specified destination
    pub fn download<P: AsRef<Path>>(
        &self,
        reqwest_client: &reqwest::blocking::Client,
        url: &Url,
        registry_auth_token: Option<String>,
        dest: P,
    ) -> Result<(), WallyDownloadError> {
        // a Wally-Version header is sent; overridable via the WALLY_VERSION env var
        let wally_version = std::env::var("WALLY_VERSION").unwrap_or("0.3.2".to_string());

        let response =
            maybe_authenticated_request(reqwest_client, url.as_str(), registry_auth_token)
                .header("Wally-Version", wally_version)
                .send()?;

        let status = response.status();
        if !status.is_success() {
            return Err(match status {
                reqwest::StatusCode::NOT_FOUND => WallyDownloadError::NotFound(self.name.clone()),
                reqwest::StatusCode::UNAUTHORIZED => {
                    WallyDownloadError::Unauthorized(self.name.clone())
                }
                _ => WallyDownloadError::Http(status, response.text()?),
            });
        }

        // wally packages are zip archives; extract straight into dest
        let body = response.bytes()?;
        zip::read::ZipArchive::new(Cursor::new(body))?.extract(dest.as_ref())?;

        Ok(())
    }
}
// the `[package]` table of a wally manifest; all metadata fields beyond
// name/version/registry are optional in the manifest
#[derive(Deserialize, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct WallyPackage {
    pub(crate) name: WallyPackageName,
    pub(crate) version: Version,
    // the registry URL, used as the index URL for all of the package's dependencies
    pub(crate) registry: Url,
    #[serde(default)]
    pub(crate) realm: Option<Realm>,
    #[serde(default)]
    pub(crate) description: Option<String>,
    #[serde(default)]
    pub(crate) license: Option<String>,
    #[serde(default)]
    pub(crate) authors: Option<Vec<String>>,
    #[serde(default)]
    pub(crate) private: Option<bool>,
}
// the `[place]` table of a wally manifest; both fields are optional and the
// whole table may be absent (hence the `Default` derive)
#[derive(Deserialize, Default, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct WallyPlace {
    #[serde(default)]
    pub(crate) shared_packages: Option<String>,
    #[serde(default)]
    pub(crate) server_packages: Option<String>,
}
// a parsed `wally.toml`; kebab-case maps e.g. `server-dependencies` to
// `server_dependencies`. Dependency values are raw `name@req` strings,
// parsed later by `parse_wally_dependencies`.
#[derive(Deserialize, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct WallyManifest {
    pub(crate) package: WallyPackage,
    #[serde(default)]
    pub(crate) place: WallyPlace,
    #[serde(default)]
    pub(crate) dependencies: BTreeMap<String, String>,
    #[serde(default)]
    pub(crate) server_dependencies: BTreeMap<String, String>,
    #[serde(default)]
    pub(crate) dev_dependencies: BTreeMap<String, String>,
}
/// An error that occurred while converting a wally manifest's dependencies
#[derive(Debug, Error)]
pub enum WallyManifestDependencyError {
    /// An error that occurred because the dependency specifier is invalid
    // carries the original specifier string (e.g. one missing the `@` separator)
    #[error("invalid dependency specifier: {0}")]
    InvalidDependencySpecifier(String),
    /// An error that occurred while parsing a package name
    #[error("error parsing package name")]
    PackageName(#[from] FromStrPackageNameParseError<WallyPackageNameValidationError>),
    /// An error that occurred while parsing a version requirement
    #[error("error parsing version requirement")]
    VersionReq(#[from] semver::Error),
}
/// Parses every `name@req` dependency string from the manifest's shared,
/// server and dev dependency tables into `DependencySpecifier`s, tagging each
/// with the realm of the table it came from.
pub(crate) fn parse_wally_dependencies(
    manifest: WallyManifest,
) -> Result<BTreeMap<String, DependencySpecifier>, WallyManifestDependencyError> {
    let groups = [
        (manifest.dependencies, Realm::Shared),
        (manifest.server_dependencies, Realm::Server),
        (manifest.dev_dependencies, Realm::Development),
    ];

    let mut parsed = BTreeMap::new();

    for (deps, realm) in groups {
        for (desired_name, specifier) in deps {
            // specifiers take the form `scope/name@version-req`
            let (name, req) = specifier.split_once('@').ok_or_else(|| {
                WallyManifestDependencyError::InvalidDependencySpecifier(specifier.clone())
            })?;

            let name: WallyPackageName = name.parse()?;
            let version: VersionReq = req.parse()?;

            parsed.insert(
                desired_name,
                DependencySpecifier::Wally(WallyDependencySpecifier {
                    name,
                    version,
                    index_url: manifest.package.registry.clone(),
                    realm: Some(realm),
                }),
            );
        }
    }

    Ok(parsed)
}
impl TryFrom<WallyManifest> for IndexFileEntry {
type Error = WallyManifestDependencyError;
fn try_from(value: WallyManifest) -> Result<Self, Self::Error> {
let dependencies = parse_wally_dependencies(value.clone())?
.into_iter()
.map(|(desired_name, specifier)| (desired_name, (specifier, DependencyType::Normal)))
.collect();
Ok(IndexFileEntry {
version: value.package.version,
realm: value.package.realm,
published_at: Default::default(),
description: value.package.description,
dependencies,
})
}
}

View file

@ -1,753 +0,0 @@
use std::{
any::Any,
collections::{BTreeMap, BTreeSet},
fmt::Debug,
fs::create_dir_all,
hash::Hash,
path::{Path, PathBuf},
sync::Arc,
};
use chrono::{DateTime, Utc};
use git2::{build::RepoBuilder, Remote, Repository, Signature};
use log::debug;
use semver::Version;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;
use crate::{
dependencies::DependencySpecifier,
manifest::{DependencyType, Manifest, Realm},
package_name::PackageName,
};
/// Owners of a scope
// a set of numeric user IDs (compared against the uploader in
// `create_package_version`)
pub type ScopeOwners = BTreeSet<u64>;
/// A packages index
pub trait Index: Send + Sync + Debug + Any + 'static {
    /// Gets the owners of a scope
    fn scope_owners(&self, scope: &str) -> Result<Option<ScopeOwners>, ScopeOwnersError>;

    /// Creates a scope
    fn create_scope_for(
        &mut self,
        scope: &str,
        owners: &ScopeOwners,
    ) -> Result<bool, ScopeOwnersError>;

    /// Gets a package from the index
    fn package(&self, name: &PackageName) -> Result<Option<IndexFile>, IndexPackageError>;

    /// Creates a package version
    fn create_package_version(
        &mut self,
        manifest: &Manifest,
        uploader: &u64,
    ) -> Result<Option<IndexFileEntry>, CreatePackageVersionError>;

    /// Gets the index's configuration
    fn config(&self) -> Result<IndexConfig, ConfigError>;

    /// Returns a function that gets the credentials for a git repository
    fn credentials_fn(&self) -> Option<&Arc<CredentialsFn>>;

    /// Returns the URL of the index's repository
    fn url(&self) -> &Url;

    /// Returns the token to this index's registry
    // defaults to no token (anonymous access)
    fn registry_auth_token(&self) -> Option<&str> {
        None
    }

    /// Updates the index
    // defaults to a no-op for index kinds that need no refreshing
    fn refresh(&self) -> Result<(), RefreshError> {
        Ok(())
    }

    /// Returns this as Any
    // enables downcasting a `dyn Index` to a concrete index type
    fn as_any(&self) -> &dyn Any;
}
/// A function that gets the credentials for a git repository
// a factory: each call returns a fresh `FnMut` credentials callback, so every
// git operation gets its own mutable callback (see `remote_callbacks!`)
pub type CredentialsFn = Box<
    dyn Fn() -> Box<
        dyn FnMut(&str, Option<&str>, git2::CredentialType) -> Result<git2::Cred, git2::Error>,
    > + Send
        + Sync,
>;
/// The packages index
#[derive(Clone)]
pub struct GitIndex {
    // local checkout of the index repository
    path: PathBuf,
    repo_url: Url,
    // not part of identity: excluded from Debug, Hash and PartialEq below
    registry_auth_token: Option<String>,
    pub(crate) credentials_fn: Option<Arc<CredentialsFn>>,
}
impl Debug for GitIndex {
    /// Only `path` and `repo_url` are shown; the auth token and the
    /// credentials closure are not included in the output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut repr = f.debug_struct("GitIndex");
        repr.field("path", &self.path);
        repr.field("repo_url", &self.repo_url);
        repr.finish()
    }
}
impl Hash for GitIndex {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // identity is (path, repo_url); tuple hashing feeds the same bytes as
        // hashing each field in sequence
        (&self.path, &self.repo_url).hash(state);
    }
}

impl PartialEq for GitIndex {
    fn eq(&self, other: &Self) -> bool {
        // same identity as Hash above
        (&self.path, &self.repo_url) == (&other.path, &other.repo_url)
    }
}

impl Eq for GitIndex {}
/// An error that occurred while getting the index's refspec
// returned by `get_refspec`, which derives the fetch/push refspec from HEAD's
// upstream branch
#[derive(Debug, Error)]
pub enum GetRefSpecError {
    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),
    /// The refspec for the upstream branch was not found
    #[error("refspec not found for upstream branch {0}")]
    RefSpecNotFound(String),
    /// The refspec is not utf-8
    #[error("refspec not utf-8")]
    RefSpecNotUtf8,
    /// The upstream branch was not found
    #[error("upstream branch not found")]
    UpstreamBranchNotFound,
    /// The upstream branch is not utf-8
    #[error("upstream branch not utf-8")]
    UpstreamBranchNotUtf8,
}
/// An error that occurred while refreshing the index
// returned by `Index::refresh` implementations
#[derive(Debug, Error)]
pub enum RefreshError {
    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while getting the index's refspec
    #[error("error getting refspec")]
    GetRefSpec(#[from] GetRefSpecError),
}
/// An error that occurred while interacting with the scope owners
#[derive(Debug, Error)]
pub enum ScopeOwnersError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while deserializing the scope owners
    // also used for serialization failures in `create_scope_for`
    #[error("error deserializing scope owners")]
    ScopeOwnersDeser(#[source] serde_yaml::Error),
    /// An error that occurred while committing and pushing to the index
    #[error("error committing and pushing to the index")]
    CommitAndPush(#[from] CommitAndPushError),
}
/// An error that occurred while committing and pushing to the index
// returned by `GitIndex::commit_and_push`
#[derive(Debug, Error)]
pub enum CommitAndPushError {
    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while getting the index's refspec
    #[error("error getting refspec")]
    GetRefSpec(#[from] GetRefSpecError),
}
/// An error that occurred while getting a package from the index
#[derive(Debug, Error)]
pub enum IndexPackageError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while deserializing the index file
    #[error("error deserializing index file")]
    FileDeser(#[source] serde_yaml::Error),
    /// An unknown error occurred
    // escape hatch for `Index` implementations with error types not listed here
    #[error("unknown error")]
    Other(#[source] Box<dyn std::error::Error + Send + Sync>),
}
/// An error that occurred while creating a package version
#[derive(Debug, Error)]
pub enum CreatePackageVersionError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while getting a package from the index
    #[error("error getting a package from the index")]
    IndexPackage(#[from] IndexPackageError),
    /// An error that occurred while serializing the index file
    #[error("error serializing index file")]
    FileSer(#[source] serde_yaml::Error),
    /// An error that occurred while committing and pushing to the index
    #[error("error committing and pushing to the index")]
    CommitAndPush(#[from] CommitAndPushError),
    /// An error that occurred while interacting with the scope owners
    #[error("error interacting with the scope owners")]
    ScopeOwners(#[from] ScopeOwnersError),
    /// The scope is missing ownership
    // the uploader is not in the scope's owners set
    #[error("missing scope ownership")]
    MissingScopeOwnership,
    /// An error that occurred while converting a manifest to an index file entry
    #[error("error converting manifest to index file entry")]
    FromManifestIndexFileEntry(#[from] FromManifestIndexFileEntry),
}
/// An error that occurred while getting the index's configuration
#[derive(Debug, Error)]
pub enum ConfigError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),
    /// An error that occurred while deserializing the index config
    #[error("error deserializing index config")]
    ConfigDeser(#[source] serde_yaml::Error),
    /// The index does not have a config file
    // i.e. `config.yaml` is missing from the index repository
    #[error("index does not have a config file - this is an issue with the index, please contact the maintainer of the index")]
    MissingConfig,
}
/// Derives the `(refspec, upstream branch name)` pair for the repository's
/// HEAD, used when fetching from and pushing to the index's origin remote.
fn get_refspec(
    repo: &Repository,
    remote: &mut Remote,
) -> Result<(String, String), GetRefSpecError> {
    let head = repo.head()?;
    let head_name = head.name().ok_or(GetRefSpecError::UpstreamBranchNotFound)?;

    let upstream_buf = repo.branch_upstream_name(head_name)?;
    let upstream = upstream_buf
        .as_str()
        .ok_or(GetRefSpecError::UpstreamBranchNotUtf8)?;

    // find the fetch refspec whose destination covers the upstream branch
    let matching = remote
        .refspecs()
        .find(|spec| spec.direction() == git2::Direction::Fetch && spec.dst_matches(upstream))
        .ok_or_else(|| GetRefSpecError::RefSpecNotFound(upstream.to_string()))?;

    // map the upstream ref back through the refspec to its source ref
    let transformed = matching.rtransform(upstream)?;
    let refspec = transformed
        .as_str()
        .ok_or(GetRefSpecError::RefSpecNotUtf8)?;

    Ok((refspec.to_string(), upstream.to_string()))
}
/// Builds a `git2::RemoteCallbacks` for the given index, wiring in the
/// index's credentials function (if any) for authentication.
macro_rules! remote_callbacks {
    ($index:expr) => {{
        #[allow(unused_imports)]
        use crate::index::Index;
        let mut remote_callbacks = git2::RemoteCallbacks::new();

        if let Some(credentials) = &$index.credentials_fn() {
            let credentials = std::sync::Arc::clone(credentials);

            // the outer `Fn` produces a fresh `FnMut` credential callback per use
            remote_callbacks.credentials(move |a, b, c| credentials()(a, b, c));
        }

        remote_callbacks
    }};
}
pub(crate) use remote_callbacks;
impl GitIndex {
    /// Creates a new git index. The `refresh` method must be called before using the index, preferably immediately after creating it.
    pub fn new<P: AsRef<Path>>(
        path: P,
        repo_url: &Url,
        credentials: Option<CredentialsFn>,
        registry_auth_token: Option<String>,
    ) -> Self {
        Self {
            path: path.as_ref().to_path_buf(),
            repo_url: repo_url.clone(),
            credentials_fn: credentials.map(Arc::new),
            registry_auth_token,
        }
    }

    /// Gets the path of the index
    pub fn path(&self) -> &Path {
        &self.path
    }

    /// Commits and pushes to the index
    ///
    /// Stages every change in the working tree, commits on top of HEAD with
    /// the given message and signature, then pushes to `origin` using the
    /// refspec derived from the upstream branch.
    pub fn commit_and_push(
        &self,
        message: &str,
        signature: &Signature,
    ) -> Result<(), CommitAndPushError> {
        let repo = Repository::open(&self.path)?;

        // stage everything ("*") and persist the in-memory index
        let mut index = repo.index()?;
        index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)?;
        index.write()?;

        let oid = index.write_tree()?;
        let tree = repo.find_tree(oid)?;

        let parent_commit = repo.head()?.peel_to_commit()?;

        repo.commit(
            Some("HEAD"),
            signature,
            signature,
            message,
            &tree,
            &[&parent_commit],
        )?;

        let mut remote = repo.find_remote("origin")?;
        let (refspec, _) = get_refspec(&repo, &mut remote)?;

        // authentication comes from the index's credentials function, if any
        remote.push(
            &[&refspec],
            Some(git2::PushOptions::new().remote_callbacks(remote_callbacks!(self))),
        )?;

        Ok(())
    }
}
/// Shared refresh logic for git-backed indices: fetch + hard reset when a
/// clone already exists, otherwise a fresh clone.
macro_rules! refresh_git_based_index {
    ($index:expr) => {{
        // a failed open (`.ok()` -> None) falls through to the clone branch
        let repo = if $index.path.exists() {
            Repository::open(&$index.path).ok()
        } else {
            None
        };

        if let Some(repo) = repo {
            let mut remote = repo.find_remote("origin")?;
            let (refspec, upstream_branch) = get_refspec(&repo, &mut remote)?;

            remote.fetch(
                &[&refspec],
                Some(git2::FetchOptions::new().remote_callbacks(remote_callbacks!($index))),
                None,
            )?;

            let commit = repo.find_reference(&upstream_branch)?.peel_to_commit()?;

            debug!(
                "refreshing index, fetching {refspec}#{} from origin",
                commit.id().to_string()
            );

            // hard reset: the local checkout mirrors upstream exactly,
            // discarding any local changes
            repo.reset(&commit.into_object(), git2::ResetType::Hard, None)?;

            Ok(())
        } else {
            debug!(
                "refreshing index - first time, cloning {} into {}",
                $index.repo_url,
                $index.path.display()
            );
            create_dir_all(&$index.path)?;

            let mut fetch_options = git2::FetchOptions::new();
            fetch_options.remote_callbacks(remote_callbacks!($index));

            RepoBuilder::new()
                .fetch_options(fetch_options)
                .clone(&$index.repo_url.to_string(), &$index.path)?;

            Ok(())
        }
    }};
}
impl Index for GitIndex {
    /// Reads `{scope}/owners.yaml` from the index checkout, if present.
    fn scope_owners(&self, scope: &str) -> Result<Option<ScopeOwners>, ScopeOwnersError> {
        let path = self.path.join(scope).join("owners.yaml");
        if !path.exists() {
            return Ok(None);
        }

        let contents = std::fs::read(&path)?;
        let owners: ScopeOwners =
            serde_yaml::from_slice(&contents).map_err(ScopeOwnersError::ScopeOwnersDeser)?;

        Ok(Some(owners))
    }

    /// Writes `{scope}/owners.yaml`; returns `false` if the scope already exists.
    fn create_scope_for(
        &mut self,
        scope: &str,
        owners: &ScopeOwners,
    ) -> Result<bool, ScopeOwnersError> {
        let path = self.path.join(scope);
        if path.exists() {
            return Ok(false);
        }

        create_dir_all(&path)?;

        serde_yaml::to_writer(std::fs::File::create(path.join("owners.yaml"))?, owners)
            .map_err(ScopeOwnersError::ScopeOwnersDeser)?;

        Ok(true)
    }

    /// Reads the package's index file at `{scope}/{name}`, if present.
    fn package(&self, name: &PackageName) -> Result<Option<IndexFile>, IndexPackageError> {
        let path = self.path.join(name.scope()).join(name.name());
        if !path.exists() {
            return Ok(None);
        }

        let contents = std::fs::read(&path)?;
        let file: IndexFile =
            serde_yaml::from_slice(&contents).map_err(IndexPackageError::FileDeser)?;

        Ok(Some(file))
    }

    /// Adds a new version entry for the manifest's package, creating the scope
    /// (owned solely by `uploader`) on first publish. Returns `Ok(None)` when
    /// the version already exists. Writes only to the working tree; committing
    /// and pushing is done separately via `commit_and_push`.
    fn create_package_version(
        &mut self,
        manifest: &Manifest,
        uploader: &u64,
    ) -> Result<Option<IndexFileEntry>, CreatePackageVersionError> {
        let scope = manifest.name.scope();

        // the uploader must own the scope, or be able to claim an unowned one
        if let Some(owners) = self.scope_owners(scope)? {
            if !owners.contains(uploader) {
                return Err(CreatePackageVersionError::MissingScopeOwnership);
            }
        } else if !self.create_scope_for(scope, &BTreeSet::from([*uploader]))? {
            return Err(CreatePackageVersionError::MissingScopeOwnership);
        }

        let path = self.path.join(scope);

        let mut file =
            if let Some(file) = self.package(&PackageName::Standard(manifest.name.clone()))? {
                // publishing an already-present version is a no-op
                if file.iter().any(|e| e.version == manifest.version) {
                    return Ok(None);
                }
                file
            } else {
                BTreeSet::new()
            };

        let entry: IndexFileEntry = manifest.clone().try_into()?;
        file.insert(entry.clone());

        serde_yaml::to_writer(
            std::fs::File::create(path.join(manifest.name.name()))?,
            &file,
        )
        .map_err(CreatePackageVersionError::FileSer)?;

        Ok(Some(entry))
    }

    /// Reads the index's `config.yaml`.
    fn config(&self) -> Result<IndexConfig, ConfigError> {
        let path = self.path.join("config.yaml");
        if !path.exists() {
            return Err(ConfigError::MissingConfig);
        }

        let contents = std::fs::read(&path)?;
        let config: IndexConfig =
            serde_yaml::from_slice(&contents).map_err(ConfigError::ConfigDeser)?;

        Ok(config)
    }

    fn credentials_fn(&self) -> Option<&Arc<CredentialsFn>> {
        self.credentials_fn.as_ref()
    }

    fn url(&self) -> &Url {
        &self.repo_url
    }

    fn registry_auth_token(&self) -> Option<&str> {
        self.registry_auth_token.as_deref()
    }

    /// Fetches and hard-resets (or clones) the local checkout of the index repo.
    fn refresh(&self) -> Result<(), RefreshError> {
        refresh_git_based_index!(self)
    }

    fn as_any(&self) -> &dyn Any {
        self
    }
}
/// The configuration of the index
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct IndexConfig {
/// The URL of the index's API
pub api: Url,
/// The URL of the index's download API, defaults to `{API_URL}/v0/packages/{PACKAGE_AUTHOR}/{PACKAGE_NAME}/{PACKAGE_VERSION}`.
/// Has the following variables:
/// - `{API_URL}`: The URL of the index's API (without trailing `/`)
/// - `{PACKAGE_AUTHOR}`: The author of the package
/// - `{PACKAGE_NAME}`: The name of the package
/// - `{PACKAGE_VERSION}`: The version of the package
pub download: Option<String>,
/// Whether to allow git dependencies
#[serde(default)]
pub git_allowed: bool,
/// Whether to allow custom registries
#[serde(default)]
pub custom_registry_allowed: bool,
/// The OAuth client ID for GitHub OAuth
// No `#[serde(default)]` — deserialization fails when this is absent.
pub github_oauth_client_id: String,
}
impl IndexConfig {
    /// Gets the URL of the index's API, with any trailing `/` removed.
    pub fn api(&self) -> &str {
        self.api.as_str().trim_end_matches('/')
    }

    /// Gets the URL of the index's download API with `{API_URL}` substituted.
    ///
    /// Falls back to the default template when the index does not configure
    /// one; the `{PACKAGE_*}` placeholders are left for the caller to fill.
    pub fn download(&self) -> String {
        // `as_deref` yields an `Option<&str>`, avoiding the temporary
        // `String` the previous `&"...".to_string()` form allocated.
        self.download
            .as_deref()
            .unwrap_or("{API_URL}/v0/packages/{PACKAGE_AUTHOR}/{PACKAGE_NAME}/{PACKAGE_VERSION}")
            .replace("{API_URL}", self.api())
    }
}
/// An entry in the index file
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub struct IndexFileEntry {
/// The version of the package
pub version: Version,
/// The realm of the package
pub realm: Option<Realm>,
/// When the package was published
// Defaults to deserialization time so entries written before this field
// existed still parse.
#[serde(default = "Utc::now")]
pub published_at: DateTime<Utc>,
/// A description of the package
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
/// The dependencies of the package
// Keyed by the name the dependent requested; the value carries the
// resolved specifier plus whether it is a normal or peer dependency.
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<String, (DependencySpecifier, DependencyType)>,
}
/// An error that occurred while converting a manifest to an index file entry
#[derive(Debug, Error)]
pub enum FromManifestIndexFileEntry {
/// An error that occurred because an index is not specified
// Raised when a registry dependency names an index alias that is not
// present in the manifest's `indices` table.
#[error("index {0} is not specified")]
IndexNotSpecified(String),
}
impl TryFrom<Manifest> for IndexFileEntry {
type Error = FromManifestIndexFileEntry;
/// Builds an index entry from a manifest, stamping it with the current
/// time and resolving each registry dependency's index alias to the
/// concrete index recorded in the manifest's `indices` table.
fn try_from(manifest: Manifest) -> Result<Self, Self::Error> {
let dependencies = manifest.dependencies();
let indices = manifest.indices;
Ok(Self {
version: manifest.version,
realm: manifest.realm,
published_at: Utc::now(),
description: manifest.description,
dependencies: dependencies
.into_iter()
.map(|(desired_name, (dep, ty))| {
Ok((
desired_name,
match dep {
// Swap the manifest-local alias for the index it points
// to; an unknown alias aborts the whole conversion.
DependencySpecifier::Registry(mut registry) => {
registry.index = indices
.get(&registry.index)
.ok_or_else(|| {
FromManifestIndexFileEntry::IndexNotSpecified(
registry.index.clone(),
)
})?
.clone();
(DependencySpecifier::Registry(registry), ty)
}
// Non-registry dependencies pass through unchanged.
d => (d, ty),
},
))
})
.collect::<Result<_, _>>()?,
})
}
}
impl PartialOrd for IndexFileEntry {
    /// Delegates to `Ord` so the two orderings can never diverge
    /// (canonical form; avoids clippy's non-canonical-partial-ord lint).
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for IndexFileEntry {
/// Entries are ordered solely by semver version, so an `IndexFile`
/// (a `BTreeSet`) stays sorted by version.
// NOTE(review): derived `PartialEq` compares all fields while this
// compares only `version`; two distinct entries with the same version
// violate Ord/Eq consistency — confirm intended.
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.version.cmp(&other.version)
}
}
/// An index file: the set of all published versions of one package,
/// kept ordered by version (see `Ord for IndexFileEntry`).
pub type IndexFile = BTreeSet<IndexFileEntry>;
#[cfg(feature = "wally")]
#[derive(Clone)]
// A read-only "virtual" index over a checkout of a wally registry; only
// package lookup and git plumbing are supported (mutating operations in the
// `Index` impl are `unimplemented!`).
pub(crate) struct WallyIndex {
repo_url: Url,
registry_auth_token: Option<String>,
credentials_fn: Option<Arc<CredentialsFn>>,
// Local checkout of the wally registry's git repository.
pub(crate) path: PathBuf,
}
#[cfg(feature = "wally")]
impl Debug for WallyIndex {
// Manual impl: omits the auth token (keeps it out of logs) and
// `credentials_fn` (presumably because the closure is not `Debug`).
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("WallyIndex")
.field("path", &self.path)
.field("repo_url", &self.repo_url)
.finish()
}
}
#[cfg(feature = "wally")]
impl WallyIndex {
    /// Creates a virtual wally index for the repository at `repo_url`,
    /// checked out locally at `path`.
    pub(crate) fn new(
        repo_url: Url,
        registry_auth_token: Option<String>,
        path: &Path,
        credentials_fn: Option<Arc<CredentialsFn>>,
    ) -> Self {
        let path = path.to_owned();
        Self {
            path,
            repo_url,
            registry_auth_token,
            credentials_fn,
        }
    }
}
#[cfg(feature = "wally")]
impl Index for WallyIndex {
/// Not supported: scope ownership has no meaning for wally registries.
fn scope_owners(&self, _scope: &str) -> Result<Option<ScopeOwners>, ScopeOwnersError> {
unimplemented!("wally index is a virtual index meant for wally compatibility only")
}
/// Not supported: wally indices cannot be mutated through pesde.
fn create_scope_for(
&mut self,
_scope: &str,
_owners: &ScopeOwners,
) -> Result<bool, ScopeOwnersError> {
unimplemented!("wally index is a virtual index meant for wally compatibility only")
}
/// Looks up a package in the wally registry checkout, converting every
/// wally manifest found into a pesde index entry.
///
/// Returns `Ok(None)` when the package file does not exist.
fn package(&self, name: &PackageName) -> Result<Option<IndexFile>, IndexPackageError> {
let path = self.path.join(name.scope()).join(name.name());
if !path.exists() {
return Ok(None);
}
let file = std::fs::File::open(&path)?;
let file = std::io::BufReader::new(file);
// Streaming deserializer: the file is a sequence of concatenated JSON
// manifest documents (presumably one per line, wally's format), not a
// single JSON array.
let manifest_stream = serde_json::Deserializer::from_reader(file)
.into_iter::<crate::dependencies::wally::WallyManifest>()
.collect::<Result<Vec<_>, _>>()
.map_err(|e| IndexPackageError::Other(Box::new(e)))?;
Ok(Some(
manifest_stream
.into_iter()
.map(|m| m.try_into())
.collect::<Result<BTreeSet<_>, _>>()
.map_err(|e| IndexPackageError::Other(Box::new(e)))?,
))
}
/// Not supported: publishing goes through real pesde indices only.
fn create_package_version(
&mut self,
_manifest: &Manifest,
_uploader: &u64,
) -> Result<Option<IndexFileEntry>, CreatePackageVersionError> {
unimplemented!("wally index is a virtual index meant for wally compatibility only")
}
/// Not supported: wally registries carry no pesde `config.yaml`.
fn config(&self) -> Result<IndexConfig, ConfigError> {
unimplemented!("wally index is a virtual index meant for wally compatibility only")
}
/// The git credentials callback for this index, if any.
fn credentials_fn(&self) -> Option<&Arc<CredentialsFn>> {
self.credentials_fn.as_ref()
}
/// The URL of the wally registry's git repository.
fn url(&self) -> &Url {
&self.repo_url
}
/// The registry API token, if any.
fn registry_auth_token(&self) -> Option<&str> {
self.registry_auth_token.as_deref()
}
/// Updates the local checkout from the git remote.
fn refresh(&self) -> Result<(), RefreshError> {
refresh_git_based_index!(self)
}
/// Enables downcasting from `dyn Index`.
fn as_any(&self) -> &dyn Any {
self
}
}

View file

@ -1,51 +1,160 @@
#![deny(missing_docs)]
//! pesde is a package manager for Roblox that is designed to be feature-rich and easy to use.
//! Currently, pesde is in a very early stage of development, but already supports the following features:
//! - Managing dependencies
//! - Re-exporting types
//! - `bin` exports (ran with Lune)
//! - Patching packages
//! - Downloading packages from Wally registries
// #![deny(missing_docs)] - TODO: bring this back before publishing 0.5
/// Resolving, downloading and managing dependencies
pub mod dependencies;
/// Managing the pesde index
pub mod index;
/// Creating linking files ('re-export' modules)
pub mod linking_file;
/// Managing the pesde manifest
#[cfg(not(any(feature = "roblox", feature = "lune", feature = "luau")))]
compile_error!("at least one of the features `roblox`, `lune`, or `luau` must be enabled");
use once_cell::sync::Lazy;
use std::path::{Path, PathBuf};
pub mod lockfile;
pub mod manifest;
/// Multi-threading utilities
pub mod multithread;
/// Creating, parsing, and validating package names
pub mod package_name;
/// Managing patches
pub mod patches;
/// Managing pesde projects
pub mod project;
pub mod names;
pub mod source;
/// The folder that contains shared packages
pub const PACKAGES_FOLDER: &str = "packages";
/// The folder that contains dev packages
pub const DEV_PACKAGES_FOLDER: &str = "dev_packages";
/// The folder that contains server packages
pub const SERVER_PACKAGES_FOLDER: &str = "server_packages";
/// The folder that contains the packages index (where every package is stored after being downloaded)
pub const INDEX_FOLDER: &str = "pesde_index";
/// The name of the manifest file
pub const MANIFEST_FILE_NAME: &str = "pesde.yaml";
/// The name of the lockfile
pub const LOCKFILE_FILE_NAME: &str = "pesde-lock.yaml";
/// The name of the patches folder
pub const PATCHES_FOLDER: &str = "patches";
/// Files to be ignored when publishing
pub const IGNORED_FOLDERS: &[&str] = &[
PACKAGES_FOLDER,
DEV_PACKAGES_FOLDER,
SERVER_PACKAGES_FOLDER,
".git",
];
pub const LOCKFILE_FILE_NAME: &str = "pesde.lock";
pub(crate) fn is_default<T: Default + PartialEq>(t: &T) -> bool {
t == &Default::default()
// Shared blocking HTTP client, built once and reused so connection pooling
// works process-wide. The user agent is `pesde/<version>`.
pub(crate) static REQWEST_CLIENT: Lazy<reqwest::blocking::Client> = Lazy::new(|| {
reqwest::blocking::Client::builder()
.user_agent(concat!(
env!("CARGO_PKG_NAME"),
"/",
env!("CARGO_PKG_VERSION")
))
.build()
.expect("failed to create reqwest client")
});
/// A git username/password (or token) pair used to authenticate remotes.
#[derive(Debug, Clone)]
pub struct GitAccount {
username: String,
// SecretString keeps the secret redacted in Debug output.
password: secrecy::SecretString,
}
impl GitAccount {
/// Creates an account from a username and any secret-convertible password.
pub fn new<S: Into<secrecy::SecretString>>(username: String, password: S) -> Self {
GitAccount {
username,
password: password.into(),
}
}
/// Converts to gix's account type. This exposes the secret in a plain
/// `String` clone, so the returned value should be short-lived.
pub fn as_account(&self) -> gix::sec::identity::Account {
use secrecy::ExposeSecret;
gix::sec::identity::Account {
username: self.username.clone(),
password: self.password.expose_secret().to_string(),
}
}
}
impl From<gix::sec::identity::Account> for GitAccount {
/// Wraps a gix account, moving its plaintext password into a SecretString.
fn from(account: gix::sec::identity::Account) -> Self {
GitAccount {
username: account.username,
password: account.password.into(),
}
}
}
/// Authentication used by a project: an optional pesde registry token and
/// optional git credentials. The default is fully unauthenticated.
#[derive(Debug, Default, Clone)]
pub struct AuthConfig {
pesde_token: Option<secrecy::SecretString>,
git_credentials: Option<GitAccount>,
}
impl AuthConfig {
/// Creates an empty (unauthenticated) config.
pub fn new() -> Self {
AuthConfig::default()
}
/// Builder-style setter for the pesde registry token.
pub fn with_pesde_token<S: Into<secrecy::SecretString>>(mut self, token: Option<S>) -> Self {
self.pesde_token = token.map(Into::into);
self
}
/// Builder-style setter for git credentials.
pub fn with_git_credentials(mut self, git_credentials: Option<GitAccount>) -> Self {
self.git_credentials = git_credentials;
self
}
}
/// Installs a credentials callback on a gix remote connection when git
/// credentials are configured; otherwise leaves the connection untouched.
pub(crate) fn authenticate_conn(
conn: &mut gix::remote::Connection<
'_,
'_,
Box<dyn gix::protocol::transport::client::Transport + Send>,
>,
auth_config: AuthConfig,
) {
if let Some(iden) = auth_config.git_credentials {
conn.set_credentials(move |action| match action {
// Hand gix our fixed identity whenever it asks for credentials.
gix::credentials::helper::Action::Get(ctx) => {
Ok(Some(gix::credentials::protocol::Outcome {
identity: iden.as_account(),
next: gix::credentials::helper::NextAction::from(ctx),
}))
}
// Nothing to persist or erase: credentials live only in memory.
gix::credentials::helper::Action::Store(_) => Ok(None),
gix::credentials::helper::Action::Erase(_) => Ok(None),
});
}
}
/// A pesde project: a directory containing a `pesde.yaml` manifest, plus the
/// global data directory and the credentials used for remote operations.
#[derive(Debug)]
pub struct Project {
// Root directory containing the manifest.
path: PathBuf,
// Directory for cross-project persistent data.
data_dir: PathBuf,
auth_config: AuthConfig,
}
impl Project {
    /// Creates a new project rooted at `path`, storing global state under
    /// `data_dir` and authenticating remote operations with `auth_config`.
    pub fn new<P: AsRef<Path>, Q: AsRef<Path>>(
        path: P,
        data_dir: Q,
        auth_config: AuthConfig,
    ) -> Self {
        Project {
            path: path.as_ref().to_path_buf(),
            data_dir: data_dir.as_ref().to_path_buf(),
            auth_config,
        }
    }

    /// The root directory of the project (where the manifest lives).
    pub fn path(&self) -> &Path {
        &self.path
    }

    /// The directory used for persistent, project-independent data.
    pub fn data_dir(&self) -> &Path {
        &self.data_dir
    }

    /// Reads the raw bytes of the project's manifest file.
    pub fn read_manifest(&self) -> Result<Vec<u8>, errors::ManifestReadError> {
        let bytes = std::fs::read(self.path.join(MANIFEST_FILE_NAME))?;
        Ok(bytes)
    }

    /// Reads and deserializes the project's manifest.
    pub fn deser_manifest(&self) -> Result<manifest::Manifest, errors::ManifestReadError> {
        // Reuse `read_manifest` so the manifest path is resolved in exactly
        // one place instead of duplicating the read logic.
        let bytes = self.read_manifest()?;
        Ok(serde_yaml::from_slice(&bytes)?)
    }

    /// Overwrites the manifest file with `manifest`.
    pub fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
        std::fs::write(self.path.join(MANIFEST_FILE_NAME), manifest.as_ref())
    }
}
pub mod errors {
use thiserror::Error;
/// An error returned when the project manifest cannot be read or parsed.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ManifestReadError {
/// Reading the manifest file from disk failed.
#[error("io error reading manifest file")]
Io(#[from] std::io::Error),
/// The file is not valid YAML for the manifest schema.
#[error("error deserializing manifest file")]
Serde(#[from] serde_yaml::Error),
}
}

View file

@ -1,317 +0,0 @@
use std::{
collections::HashSet,
fs::{create_dir_all, read_to_string, write},
path::{Component, Path, PathBuf},
};
use full_moon::{
ast::types::ExportedTypeDeclaration,
parse,
visitors::{Visit, Visitor},
};
use log::debug;
use semver::Version;
use thiserror::Error;
use crate::{
dependencies::resolution::{packages_folder, ResolvedPackage, RootLockfileNode},
manifest::{Manifest, ManifestReadError, PathStyle, Realm},
package_name::PackageName,
project::Project,
};
// Collects `export type` re-export lines while walking a Luau AST.
struct TypeVisitor {
pub(crate) types: Vec<String>,
}
impl Visitor for TypeVisitor {
/// For every `export type Name<...> = ...` in the source, records an
/// `export type Name<decl> = module.Name<args>` line so the linking file
/// re-exports the type from the real module.
fn visit_exported_type_declaration(&mut self, node: &ExportedTypeDeclaration) {
let name = node.type_declaration().type_name().to_string();
let (declaration_generics, generics) =
if let Some(declaration) = node.type_declaration().generics() {
let mut declaration_generics = vec![];
let mut generics = vec![];
for generic in declaration.generics().iter() {
declaration_generics.push(generic.to_string());
// A defaulted generic keeps its default on the declaration
// side, but only the bare parameter is passed through on
// the right-hand side.
if generic.default_type().is_some() {
generics.push(generic.parameter().to_string())
} else {
generics.push(generic.to_string())
}
}
(
format!("<{}>", declaration_generics.join(", ")),
format!("<{}>", generics.join(", ")),
)
} else {
// Non-generic type: no angle brackets on either side.
("".to_string(), "".to_string())
};
self.types.push(format!(
"export type {name}{declaration_generics} = module.{name}{generics}\n"
));
}
}
/// Generates the contents of a linking file, given the require path, and the contents of the target file
/// The contents will be scanned for type exports, and the linking file will be generated accordingly
pub fn linking_file(content: &str, path: &str) -> Result<String, full_moon::Error> {
let mut linker = format!("local module = require({path})\n");
let mut visitor = TypeVisitor { types: vec![] };
parse(content)?.nodes().visit(&mut visitor);
for ty in visitor.types {
linker.push_str(&ty);
}
linker.push_str("return module");
Ok(linker)
}
#[derive(Debug, Error)]
/// An error that occurred while linking dependencies
// Variants cover filesystem access, path manipulation, manifest reading,
// and Luau parsing failures encountered while generating linking files.
pub enum LinkingError {
#[error("error interacting with the file system")]
/// An error that occurred while interacting with the file system
Io(#[from] std::io::Error),
#[error("failed getting file name from {0}")]
/// An error that occurred while getting a file name
FileNameFail(PathBuf),
#[error("failed converting file name to string")]
/// An error that occurred while converting a file name to a string
FileNameToStringFail,
#[error("failed getting relative path from {0} to {1}")]
/// An error that occurred while getting a relative path
RelativePathFail(PathBuf, PathBuf),
#[error("failed getting path parent of {0}")]
/// An error that occurred while getting a path parent
ParentFail(PathBuf),
#[error("failed to convert path component to string")]
/// An error that occurred while converting a path component to a string
ComponentToStringFail,
#[error("failed to get path string")]
/// An error that occurred while getting a path string
PathToStringFail,
#[error("error encoding utf-8 string")]
/// An error that occurred while converting a byte slice to a string
Utf8(#[from] std::str::Utf8Error),
#[error("error reading manifest")]
/// An error that occurred while reading the manifest of a package
ManifestRead(#[from] ManifestReadError),
#[error("missing realm {0} in-game path")]
/// An error that occurred while getting the in-game path for a realm
MissingRealmInGamePath(Realm),
#[error("library source is not valid Luau")]
/// An error that occurred because the library source is not valid Luau
InvalidLuau(#[from] full_moon::Error),
}
// Writes a `<desired_name>.lua` linking file into `destination_dir` that
// requires `resolved_pkg`'s `lib` export and re-exports its types. Packages
// without a `lib` export are skipped. When `as_root` is true and the package
// is listed in the root specifiers, the destination is redirected into the
// realm-appropriate root packages folder.
pub(crate) fn link<P: AsRef<Path>, Q: AsRef<Path>>(
project: &Project,
resolved_pkg: &ResolvedPackage,
lockfile: &RootLockfileNode,
destination_dir: P,
parent_dependency_packages_dir: Q,
desired_name: &str,
as_root: bool,
) -> Result<(), LinkingError> {
let (_, source_dir) = resolved_pkg.directory(project.path());
let file = Manifest::from_path(&source_dir)?;
// Only library packages (with a `lib` export) get linking files.
let Some(relative_lib_export) = file.exports.lib else {
return Ok(());
};
let lib_export = relative_lib_export.to_path(&source_dir);
let path_style = &project.manifest().path_style;
// Irrefutable: Roblox is currently the only path style.
let PathStyle::Roblox { place } = &path_style;
debug!("linking {resolved_pkg} using `{}` path style", path_style);
let pkg_name = resolved_pkg.pkg_ref.name();
let name = pkg_name.name();
// Root dependencies link into the realm-specific root packages folder;
// everything else links into the caller-provided destination.
let destination_dir = match lockfile
.specifiers
.get(&pkg_name)
.and_then(|v| v.get(resolved_pkg.pkg_ref.version()))
{
Some((specifier, _)) if as_root => project.path().join(packages_folder(
specifier.realm().copied().unwrap_or_default(),
)),
_ => destination_dir.as_ref().to_path_buf(),
};
create_dir_all(&destination_dir)?;
let destination_file = destination_dir.join(desired_name.to_string() + ".lua");
let realm_folder = project.path().join(resolved_pkg.packages_folder());
let in_different_folders = realm_folder != parent_dependency_packages_dir.as_ref();
// Compute the filesystem path the require must traverse: relative to the
// target's realm folder when crossing realms, otherwise relative to the
// linking file's own directory.
let mut path = if in_different_folders {
pathdiff::diff_paths(&source_dir, &realm_folder)
.ok_or_else(|| LinkingError::RelativePathFail(source_dir.clone(), realm_folder))?
} else {
pathdiff::diff_paths(&source_dir, &destination_dir).ok_or_else(|| {
LinkingError::RelativePathFail(source_dir.clone(), destination_dir.to_path_buf())
})?
};
path.set_extension("");
// Anchor of the in-game require chain: the configured place location when
// crossing realms, otherwise relative to the linking script itself.
let beginning = if in_different_folders {
place
.get(&resolved_pkg.realm)
.ok_or_else(|| LinkingError::MissingRealmInGamePath(resolved_pkg.realm))?
.clone()
} else if name == "init" {
"script".to_string()
} else {
"script.Parent".to_string()
};
// Translate each path component into Roblox instance indexing:
// `..` -> `.Parent`, a folder/file name -> `["name"]`.
let mut components = path
.components()
.map(|component| {
Ok(match component {
Component::ParentDir => ".Parent".to_string(),
Component::Normal(part) => format!(
"[{:?}]",
part.to_str().ok_or(LinkingError::ComponentToStringFail)?
),
_ => unreachable!("invalid path component"),
})
})
.collect::<Result<Vec<_>, LinkingError>>()?;
// Drop the last component; the final index is the package name itself.
components.pop();
let path = beginning + &components.join("") + &format!("[{name:?}]");
debug!(
"writing linking file for {} with import `{path}` to {}",
source_dir.display(),
destination_file.display()
);
// NOTE(review): a literal `lib: true` appears to mean "no source file to
// scan", so no types are re-exported — confirm against manifest docs.
let file_contents = match relative_lib_export.as_str() {
"true" => "".to_string(),
_ => read_to_string(lib_export)?,
};
let linking_file_contents = linking_file(&file_contents, &path)?;
write(&destination_file, linking_file_contents)?;
Ok(())
}
#[derive(Debug, Error)]
#[error("error linking {1}@{2} to {3}@{4}")]
/// An error that occurred while linking the dependencies
// Fields: (source error, dependency name, dependency version,
// dependent name, dependent version).
pub struct LinkingDependenciesError(
#[source] LinkingError,
PackageName,
Version,
PackageName,
Version,
);
impl Project {
/// Links the dependencies of the project
// Walks every resolved package in the lockfile: first links each
// package's own dependencies into its container directory, then links
// packages that are direct (root) dependencies into the project's
// realm-specific packages folder.
pub fn link_dependencies(
&self,
lockfile: &RootLockfileNode,
) -> Result<(), LinkingDependenciesError> {
// (name, version) pairs the root project depends on directly.
let root_deps = lockfile
.specifiers
.iter()
.flat_map(|(name, versions)| versions.keys().map(|version| (name.clone(), version)))
.collect::<HashSet<_>>();
for (name, versions) in &lockfile.children {
for (version, resolved_pkg) in versions {
let (container_dir, _) = resolved_pkg.directory(self.path());
debug!(
"linking package {name}@{version}'s dependencies to directory {}",
container_dir.display()
);
for (dep_name, (dep_version, desired_name)) in &resolved_pkg.dependencies {
// A resolved package's dependencies are guaranteed to be in
// the lockfile, hence the unwrap.
let dep = lockfile
.children
.get(dep_name)
.and_then(|versions| versions.get(dep_version))
.unwrap();
link(
self,
dep,
lockfile,
&container_dir,
&self.path().join(resolved_pkg.packages_folder()),
desired_name,
false,
)
.map_err(|e| {
LinkingDependenciesError(
e,
dep_name.clone(),
dep_version.clone(),
name.clone(),
version.clone(),
)
})?;
}
// Direct dependencies additionally get a linking file in the
// project's own packages folder.
if root_deps.contains(&(name.clone(), version)) {
let (specifier, desired_name) = lockfile.root_specifier(resolved_pkg).unwrap();
let linking_dir = &self.path().join(packages_folder(
specifier.realm().copied().unwrap_or_default(),
));
debug!(
"linking root package {name}@{version} to directory {}",
linking_dir.display()
);
link(
self,
resolved_pkg,
lockfile,
linking_dir,
self.path().join(resolved_pkg.packages_folder()),
desired_name,
true,
)
.map_err(|e| {
LinkingDependenciesError(
e,
name.clone(),
version.clone(),
name.clone(),
version.clone(),
)
})?;
}
}
}
Ok(())
}
}

20
src/lockfile.rs Normal file
View file

@ -0,0 +1,20 @@
use crate::{
names::{PackageName, PackageNames},
source::{DependencySpecifiers, PackageRefs},
};
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
// The on-disk lockfile (`pesde.lock`), pinning every resolved package.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Lockfile {
// Name of the package this lockfile belongs to.
pub name: PackageName,
// Requested specifier per package name and resolved version — presumably
// the root's direct dependencies; confirm against the resolver.
pub specifiers: BTreeMap<PackageNames, BTreeMap<Version, DependencySpecifiers>>,
// Every resolved package in the graph, keyed by name then version.
pub dependencies: BTreeMap<PackageNames, BTreeMap<Version, LockfileNode>>,
}
// A single resolved package in the lockfile graph.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct LockfileNode {
// The resolved package reference (see `source::PackageRefs`).
pub pkg_ref: PackageRefs,
}

View file

@ -1,17 +1,48 @@
use once_cell::sync::Lazy;
use cli::{auth::auth_command, config::config_command, root::root_command};
use crate::cli::{CliConfig, Command, CLI, MULTI};
use crate::cli::get_token;
use clap::Parser;
use colored::Colorize;
use pesde::{AuthConfig, Project};
mod cli;
fn main() -> anyhow::Result<()> {
Lazy::force(&MULTI);
#[derive(Parser, Debug)]
#[clap(version, about = "pesde is a feature-rich package manager for Luau")]
#[command(disable_version_flag = true)]
struct Cli {
/// Print version
#[arg(short = 'v', short_alias = 'V', long, action = clap::builder::ArgAction::Version)]
version: (),
match CLI.command.clone() {
Command::Auth { command } => auth_command(command),
Command::Config { command } => config_command(command),
cmd => root_command(cmd),
#[command(subcommand)]
subcommand: cli::SubCommand,
}
fn main() {
// Platform-appropriate per-user directories for pesde's data.
let project_dirs =
directories::ProjectDirs::from("com", env!("CARGO_PKG_NAME"), env!("CARGO_BIN_NAME"))
.expect("couldn't get home directory");
let cwd = std::env::current_dir().expect("failed to get current working directory");
let cli = Cli::parse();
let data_dir = project_dirs.data_dir();
// Load the stored registry token (if any), then run the chosen subcommand
// against a Project rooted at the current working directory.
if let Err(err) = get_token(data_dir).and_then(|token| {
cli.subcommand.run(Project::new(
cwd,
data_dir,
AuthConfig::new().with_pesde_token(token),
))
}) {
// Pretty-print the error and its cause chain, then exit non-zero.
eprintln!("{}: {}\n", "error".red().bold(), err.to_string().bold());
let cause = err.chain().skip(1).collect::<Vec<_>>();
if !cause.is_empty() {
eprintln!("{}:", "caused by".red().bold());
for err in cause {
eprintln!(" - {}", err.to_string().bold());
}
}
std::process::exit(1);
}
}

View file

@ -1,415 +1,174 @@
use std::{collections::BTreeMap, fmt::Display, fs::read, path::Path, str::FromStr};
use cfg_if::cfg_if;
use crate::{names::PackageName, source::DependencySpecifiers};
use relative_path::RelativePathBuf;
use semver::Version;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{
dependencies::DependencySpecifier, package_name::StandardPackageName, MANIFEST_FILE_NAME,
use serde::{de::Visitor, Deserialize, Deserializer, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use std::{
collections::BTreeMap,
fmt::{Display, Formatter},
str::FromStr,
};
/// The files exported by the package
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[serde(deny_unknown_fields)]
pub struct Exports {
/// Points to the file which exports the package. As of currently this is only used for re-exporting types.
/// Libraries must have a structure in Roblox where the main file becomes the folder, for example:
/// A package called pesde/lib has a file called src/main.lua.
/// pesde puts this package in a folder called pesde_lib.
/// The package has to have set up configuration for file-syncing tools such as Rojo so that src/main.lua becomes the pesde_lib and turns it into a ModuleScript
#[serde(default, skip_serializing_if = "Option::is_none")]
pub lib: Option<RelativePathBuf>,
/// Points to the file that will be executed with Lune
#[serde(default, skip_serializing_if = "Option::is_none")]
pub bin: Option<RelativePathBuf>,
}
/// The path style used by the package
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub enum PathStyle {
/// The path style used by Roblox (e.g. `script.Parent` or `script.Parent.Parent`)
Roblox {
/// A map of realm to in-game package folder location (used for linking between packages in different realms)
#[serde(default)]
place: BTreeMap<Realm, String>,
},
pub enum Target {
#[cfg(feature = "roblox")]
Roblox,
#[cfg(feature = "lune")]
Lune,
#[cfg(feature = "luau")]
Luau,
}
impl Default for PathStyle {
fn default() -> Self {
PathStyle::Roblox {
place: BTreeMap::new(),
}
}
}
impl Display for PathStyle {
impl Display for Target {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
PathStyle::Roblox { .. } => write!(f, "roblox"),
#[cfg(feature = "roblox")]
Target::Roblox => write!(f, "roblox"),
#[cfg(feature = "lune")]
Target::Lune => write!(f, "lune"),
#[cfg(feature = "luau")]
Target::Luau => write!(f, "luau"),
}
}
}
impl Target {
// self is the project's target, dependency is the target of the dependency
// Compatible when the targets match exactly, or when a Lune project
// consumes a Luau dependency (presumably because Lune can run plain Luau
// code); the reverse direction is not allowed.
fn is_compatible_with(&self, dependency: &Self) -> bool {
if self == dependency {
return true;
}
match (self, dependency) {
#[cfg(all(feature = "lune", feature = "luau"))]
(Target::Lune, Target::Luau) => true,
_ => false,
}
}
}
/// The realm of the package
#[derive(
Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Copy, Default,
Debug, DeserializeFromStr, SerializeDisplay, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
)]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub enum Realm {
/// The package is shared (usually ReplicatedStorage)
#[default]
Shared,
/// The package is server only (usually ServerScriptService/ServerStorage)
Server,
/// The package is development only
Development,
}
impl Realm {
/// Returns the most restrictive realm
// `Shared` is the weakest: it yields to `other`; any non-shared `self`
// takes precedence regardless of `other`.
pub fn or(self, other: Self) -> Self {
match self {
Realm::Shared => other,
_ => self,
}
}
}
impl Display for Realm {
// Lowercase names, kept in sync with the `FromStr` impl below.
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Realm::Shared => write!(f, "shared"),
Realm::Server => write!(f, "server"),
Realm::Development => write!(f, "development"),
}
}
}
/// An error that occurred while parsing a realm from a string
// Carries the unrecognized input for the error message.
#[derive(Debug, Error)]
#[error("invalid realm {0}")]
pub struct FromStrRealmError(String);
impl FromStr for Realm {
type Err = FromStrRealmError;
// Inverse of `Display`; any other string is rejected.
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"shared" => Ok(Realm::Shared),
"server" => Ok(Realm::Server),
"development" => Ok(Realm::Development),
_ => Err(FromStrRealmError(s.to_string())),
}
}
}
/// A key to override dependencies
// Outer Vec: comma-separated alternatives; inner Vec: a `>`-separated chain
// of dependency names (see the `FromStr` impl for the textual form).
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct OverrideKey(pub Vec<Vec<String>>);
impl Serialize for OverrideKey {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_str(
&self
.0
impl FromStr for OverrideKey {
type Err = errors::OverrideKeyFromStr;
// Parses e.g. `a>b>c,x>y` into `[[a, b, c], [x, y]]`. Splitting cannot
// fail, so this currently always returns `Ok`.
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Self(
s.split(',')
.map(|overrides| overrides.split('>').map(|s| s.to_string()).collect())
.collect(),
))
}
}
impl Display for OverrideKey {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}",
self.0
.iter()
.map(|overrides| {
overrides
.iter()
.map(String::to_string)
.map(|o| o.as_str())
.collect::<Vec<_>>()
.join(">")
})
.collect::<Vec<_>>()
.join(","),
.join(",")
)
}
}
impl<'de> Deserialize<'de> for OverrideKey {
fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let s = String::deserialize(deserializer)?;
let mut key = Vec::new();
for overrides in s.split(',') {
key.push(
overrides
.split('>')
.map(|s| String::from_str(s).map_err(serde::de::Error::custom))
.collect::<Result<Vec<_>, _>>()?,
);
fn deserialize_dep_specs<'de, D>(
deserializer: D,
) -> Result<BTreeMap<String, DependencySpecifiers>, D::Error>
where
D: Deserializer<'de>,
{
struct SpecsVisitor;
impl<'de> Visitor<'de> for SpecsVisitor {
type Value = BTreeMap<String, DependencySpecifiers>;
fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
formatter.write_str("a map of dependency specifiers")
}
Ok(OverrideKey(key))
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where
A: serde::de::MapAccess<'de>,
{
let mut specs = BTreeMap::new();
while let Some((key, mut value)) = map.next_entry::<String, DependencySpecifiers>()? {
value.set_alias(key.to_string());
specs.insert(key, value);
}
Ok(specs)
}
}
deserializer.deserialize_map(SpecsVisitor)
}
/// The manifest of a package
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Manifest {
/// The name of the package
pub name: StandardPackageName,
/// The version of the package. Must be [semver](https://semver.org) compatible. The registry will not accept non-semver versions and the CLI will not handle such packages
pub name: PackageName,
pub version: Version,
/// A short description of the package
#[serde(default, skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub description: Option<String>,
/// The license of the package
#[serde(default, skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub license: Option<String>,
/// The authors of the package
#[serde(default, skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub authors: Option<Vec<String>>,
/// The repository of the package
#[serde(default, skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub repository: Option<String>,
/// The files exported by the package
#[serde(default)]
pub exports: Exports,
/// The path style to use for linking modules
#[serde(default)]
pub path_style: PathStyle,
/// Whether the package is private (it should not be published)
pub target: Target,
#[serde(default)]
pub private: bool,
/// The realm of the package
#[serde(default, skip_serializing_if = "Option::is_none")]
pub realm: Option<Realm>,
/// Indices of the package
pub indices: BTreeMap<String, String>,
/// The command to generate a `sourcemap.json`
#[serde(default)]
pub indices: BTreeMap<String, url::Url>,
#[cfg(feature = "wally")]
#[serde(default, skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub wally_indices: BTreeMap<String, url::Url>,
#[cfg(feature = "wally")]
#[serde(default)]
pub sourcemap_generator: Option<String>,
/// Dependency overrides
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub overrides: BTreeMap<OverrideKey, DependencySpecifier>,
#[serde(default)]
pub overrides: BTreeMap<OverrideKey, DependencySpecifiers>,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<String, DependencySpecifier>,
/// The peer dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub peer_dependencies: BTreeMap<String, DependencySpecifier>,
#[serde(default, deserialize_with = "deserialize_dep_specs")]
pub dependencies: BTreeMap<String, DependencySpecifiers>,
#[serde(default, deserialize_with = "deserialize_dep_specs")]
pub peer_dependencies: BTreeMap<String, DependencySpecifiers>,
#[serde(default, deserialize_with = "deserialize_dep_specs")]
pub dev_dependencies: BTreeMap<String, DependencySpecifiers>,
}
/// An error that occurred while reading the manifest
#[derive(Debug, Error)]
pub enum ManifestReadError {
/// An error that occurred while interacting with the file system
#[error("error interacting with the file system")]
Io(#[from] std::io::Error),
pub mod errors {
use thiserror::Error;
/// An error that occurred while deserializing the manifest
#[error("error deserializing manifest")]
ManifestDeser(#[source] serde_yaml::Error),
}
// With the wally feature, conversion can fail in wally-specific ways and
// gets its own error enum; without it, conversion is just reading, so the
// alias keeps callers feature-agnostic.
cfg_if! {
if #[cfg(feature = "wally")] {
/// An error that occurred while converting the manifest
#[derive(Debug, Error)]
pub enum ManifestConvertError {
/// An error that occurred while reading the manifest
#[error("error reading the manifest")]
ManifestRead(#[from] ManifestReadError),
/// An error that occurred while converting the manifest
#[error("error converting the manifest")]
ManifestConvert(#[source] toml::de::Error),
/// The given path does not have a parent
#[error("the path {0} does not have a parent")]
NoParent(std::path::PathBuf),
/// An error that occurred while interacting with the file system
#[error("error interacting with the file system")]
Io(#[from] std::io::Error),
/// An error that occurred while writing the manifest
#[error("error writing the manifest")]
ManifestWrite(#[from] crate::manifest::ManifestWriteError),
/// An error that occurred while parsing the dependencies
#[error("error parsing the dependencies")]
DependencyParse(#[from] crate::dependencies::wally::WallyManifestDependencyError),
}
} else {
/// An error that occurred while converting the manifest
pub type ManifestConvertError = ManifestReadError;
}
}
/// An error that occurred while writing the manifest
// Serialization happens straight into the file, so both IO and serde
// failures can surface here.
#[derive(Debug, Error)]
pub enum ManifestWriteError {
/// An error that occurred while interacting with the file system
#[error("error interacting with the file system")]
Io(#[from] std::io::Error),
/// An error that occurred while serializing the manifest
#[error("error serializing manifest")]
ManifestSer(#[from] serde_yaml::Error),
}
/// The type of dependency
///
/// Serialized in snake_case (`normal` / `peer`); defaults to `Normal`.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[serde(rename_all = "snake_case")]
pub enum DependencyType {
    /// A normal dependency
    #[default]
    Normal,
    /// A peer dependency
    Peer,
}
/// Updates the `name` field of the project's `default.project.json` (if the
/// file exists) to the given name, leaving the file alone when the name is
/// already up to date.
pub(crate) fn update_sync_tool_files(project_path: &Path, name: String) -> std::io::Result<()> {
    // Projects without a sync tool project file are silently left alone.
    if let Ok(file) = std::fs::File::open(project_path.join("default.project.json")) {
        let mut project: serde_json::Value = serde_json::from_reader(file)?;

        // `Value`'s string `IndexMut` panics when the root is neither an
        // object nor null, so bail out on a malformed project file instead
        // of panicking.
        let root = match project.as_object_mut() {
            Some(root) => root,
            None => return Ok(()),
        };

        // Already correct - avoid rewriting the file (and churning mtimes).
        if root.get("name").and_then(|v| v.as_str()) == Some(name.as_str()) {
            return Ok(());
        }

        root.insert("name".to_string(), serde_json::Value::String(name));

        serde_json::to_writer_pretty(
            std::fs::File::create(project_path.join("default.project.json"))?,
            &project,
        )?;
    }

    Ok(())
}
impl Manifest {
    /// Reads a manifest from a path (if the path is a directory, it will look for the manifest file inside it, otherwise it will read the file directly)
    pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Result<Self, ManifestReadError> {
        let path = path.as_ref();
        // Accept either the manifest file itself or its containing directory.
        let path = if path.file_name() == Some(MANIFEST_FILE_NAME.as_ref()) {
            path.to_path_buf()
        } else {
            path.join(MANIFEST_FILE_NAME)
        };

        let raw_contents = read(path)?;
        let manifest =
            serde_yaml::from_slice(&raw_contents).map_err(ManifestReadError::ManifestDeser)?;

        Ok(manifest)
    }

    /// Tries to read the manifest from the given path, and if it fails, tries converting the `wally.toml` and writes a `pesde.yaml` in the same directory
    #[cfg(feature = "wally")]
    pub fn from_path_or_convert<P: AsRef<std::path::Path>>(
        path: P,
    ) -> Result<Self, ManifestConvertError> {
        // Normalize to the directory that contains (or will contain) the
        // manifest, so the wally.toml lookup and the write target agree.
        let dir_path = if path.as_ref().file_name() == Some(MANIFEST_FILE_NAME.as_ref()) {
            path.as_ref()
                .parent()
                .ok_or_else(|| ManifestConvertError::NoParent(path.as_ref().to_path_buf()))?
                .to_path_buf()
        } else {
            path.as_ref().to_path_buf()
        };

        Self::from_path(path).or_else(|_| {
            // Reading failed - fall back to converting a Wally manifest.
            let toml_path = dir_path.join("wally.toml");
            let toml_contents = std::fs::read_to_string(toml_path)?;
            let wally_manifest: crate::dependencies::wally::WallyManifest =
                toml::from_str(&toml_contents).map_err(ManifestConvertError::ManifestConvert)?;
            let dependencies =
                crate::dependencies::wally::parse_wally_dependencies(wally_manifest.clone())?;

            // Only record non-empty place package folders.
            let mut place = BTreeMap::new();

            if let Some(shared) = wally_manifest.place.shared_packages {
                if !shared.is_empty() {
                    place.insert(Realm::Shared, shared);
                }
            }

            if let Some(server) = wally_manifest.place.server_packages {
                if !server.is_empty() {
                    place.insert(Realm::Server, server);
                }
            }

            let manifest = Self {
                name: wally_manifest.package.name.clone().into(),
                version: wally_manifest.package.version,
                // NOTE(review): "true" looks like a sentinel value rather
                // than a real relative path - confirm against the consumer
                // of `exports.lib`.
                exports: Exports {
                    lib: Some(RelativePathBuf::from("true")),
                    bin: None,
                },
                path_style: PathStyle::Roblox { place },
                private: wally_manifest.package.private.unwrap_or(false),
                realm: wally_manifest.package.realm,
                // The converted manifest points the default index at an
                // empty URL.
                indices: BTreeMap::from([(
                    crate::project::DEFAULT_INDEX_NAME.to_string(),
                    "".to_string(),
                )]),
                sourcemap_generator: None,
                overrides: BTreeMap::new(),

                dependencies,
                peer_dependencies: Default::default(),
                description: wally_manifest.package.description,
                license: wally_manifest.package.license,
                authors: wally_manifest.package.authors,
                repository: None,
            };

            // Persist the converted manifest and keep sync tool files in
            // sync with the (possibly new) package name.
            manifest.write(&dir_path)?;
            update_sync_tool_files(&dir_path, wally_manifest.package.name.name().to_string())?;

            Ok(manifest)
        })
    }

    /// Same as `from_path`, enable the `wally` feature to add support for converting `wally.toml` to `pesde.yaml`
    #[cfg(not(feature = "wally"))]
    pub fn from_path_or_convert<P: AsRef<std::path::Path>>(
        path: P,
    ) -> Result<Self, ManifestReadError> {
        Self::from_path(path)
    }

    /// Returns all dependencies, each tagged with whether it is a normal or
    /// a peer dependency
    pub fn dependencies(&self) -> BTreeMap<String, (DependencySpecifier, DependencyType)> {
        // Normal dependencies first, then peers; on a duplicate alias the
        // peer entry wins, because map insertion keeps the last occurrence.
        self.dependencies
            .iter()
            .map(|(desired_name, specifier)| {
                (
                    desired_name.clone(),
                    (specifier.clone(), DependencyType::Normal),
                )
            })
            .chain(
                self.peer_dependencies
                    .iter()
                    .map(|(desired_name, specifier)| {
                        (
                            desired_name.clone(),
                            (specifier.clone(), DependencyType::Peer),
                        )
                    }),
            )
            .collect()
    }

    /// Writes the manifest to a path (the manifest file is created inside it)
    pub fn write<P: AsRef<std::path::Path>>(&self, to: P) -> Result<(), ManifestWriteError> {
        let manifest_path = to.as_ref().join(MANIFEST_FILE_NAME);

        serde_yaml::to_writer(std::fs::File::create(manifest_path)?, self)?;

        Ok(())
    }

    // NOTE(review): an enum declaration is not a valid item inside an `impl`
    // block - this appears to have been misplaced (likely by a bad
    // merge/diff) and should live at module level.
    #[derive(Debug, Error)]
    #[non_exhaustive]
    pub enum OverrideKeyFromStr {}
}

View file

@ -1,47 +0,0 @@
use std::sync::mpsc::{Receiver, Sender};
use threadpool::ThreadPool;
/// A multithreaded job: a fixed-size thread pool plus a channel over which
/// each task reports its `Result`.
pub struct MultithreadedJob<E: Send + Sync + 'static> {
    progress: Receiver<Result<(), E>>,
    pool: ThreadPool,
}

impl<E: Send + Sync + 'static> MultithreadedJob<E> {
    /// Creates a new multithreaded job, returning the job and the `Sender`
    /// that tasks use to report their results (see [`Self::execute`]).
    pub fn new() -> (Self, Sender<Result<(), E>>) {
        let (tx, rx) = std::sync::mpsc::channel();
        let pool = ThreadPool::new(6);

        (Self { progress: rx, pool }, tx)
    }

    /// Returns the progress (result channel) of the job
    pub fn progress(&self) -> &Receiver<Result<(), E>> {
        &self.progress
    }

    /// Waits for all queued tasks to finish, returning the first reported
    /// error, if any.
    pub fn wait(self) -> Result<(), E> {
        self.pool.join();

        // Drain only the results already in the channel. Iterating the
        // receiver directly (`for result in self.progress`) would block until
        // every `Sender` is dropped - but the sender returned by `new` may
        // still be alive in the caller, which would deadlock here. After
        // `join()` all task results have been sent, so `try_iter` sees them
        // all.
        for result in self.progress.try_iter() {
            result?;
        }

        Ok(())
    }

    /// Executes a function on the thread pool, reporting its result through
    /// a clone of the given sender.
    pub fn execute<F>(&self, tx: &Sender<Result<(), E>>, f: F)
    where
        F: (FnOnce() -> Result<(), E>) + Send + 'static,
    {
        let sender = tx.clone();

        self.pool.execute(move || {
            let result = f();
            sender.send(result).unwrap();
        });
    }
}

95
src/names.rs Normal file
View file

@ -0,0 +1,95 @@
use std::{fmt::Display, str::FromStr};
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
/// Which part of a package name a validation error refers to.
#[derive(Debug)]
pub enum ErrorReason {
    /// The `scope` half of `scope/name`.
    Scope,
    /// The `name` half of `scope/name`.
    Name,
}

impl Display for ErrorReason {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            ErrorReason::Scope => "scope",
            ErrorReason::Name => "name",
        })
    }
}
/// A validated pesde package name, stored as `(scope, name)`.
///
/// Serialized to and from the string form `scope/name` through the
/// `Display`/`FromStr` implementations (via `SerializeDisplay` /
/// `DeserializeFromStr`).
#[derive(
    Debug, DeserializeFromStr, SerializeDisplay, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
)]
pub struct PackageName(String, String);
impl FromStr for PackageName {
    type Err = errors::PackageNameError;

    /// Parses a `scope/name` string, validating both parts.
    ///
    /// Each part must be 3-32 characters drawn from `a-z`, `0-9`, and `_`,
    /// must not consist solely of digits, and must not start or end with an
    /// underscore.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let (scope, name) = s
            .split_once('/')
            .ok_or_else(|| Self::Err::InvalidFormat(s.to_string()))?;

        for (reason, part) in [(ErrorReason::Scope, scope), (ErrorReason::Name, name)] {
            if part.len() < 3 || part.len() > 32 {
                return Err(Self::Err::InvalidLength(reason, part.to_string()));
            }

            if part.chars().all(|c| c.is_ascii_digit()) {
                return Err(Self::Err::OnlyDigits(reason, part.to_string()));
            }

            if part.starts_with('_') || part.ends_with('_') {
                return Err(Self::Err::PrePostfixUnderscore(reason, part.to_string()));
            }

            // The error message promises `a-z, 0-9, and _`, but
            // `is_ascii_alphanumeric` would also let uppercase `A-Z`
            // through - check lowercase explicitly.
            if !part
                .chars()
                .all(|c| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '_')
            {
                return Err(Self::Err::InvalidCharacters(reason, part.to_string()));
            }
        }

        Ok(Self(scope.to_string(), name.to_string()))
    }
}
impl Display for PackageName {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}/{}", self.0, self.1)
}
}
impl PackageName {
    /// Returns the `(scope, name)` parts as string slices.
    ///
    /// NOTE(review): despite the name, this returns a tuple of both parts
    /// rather than a single `&str` - a name like `parts` may fit better.
    pub fn as_str(&self) -> (&str, &str) {
        (&self.0, &self.1)
    }
}

/// All package-name kinds understood by the tool; kept as an enum so other
/// name formats can presumably be added later.
#[derive(Debug, Deserialize, Serialize, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum PackageNames {
    /// A pesde package name
    Pesde(PackageName),
}

pub mod errors {
    use thiserror::Error;

    use crate::names::ErrorReason;

    /// Errors produced while parsing/validating a package name.
    #[derive(Debug, Error)]
    pub enum PackageNameError {
        /// The string is missing the `/` between scope and name.
        #[error("package name `{0}` is not in the format `scope/name`")]
        InvalidFormat(String),
        /// A part contains characters outside the allowed set.
        #[error("package {0} `{1}` contains characters outside a-z, 0-9, and _")]
        InvalidCharacters(ErrorReason, String),
        /// A part consists solely of digits.
        #[error("package {0} `{1}` contains only digits")]
        OnlyDigits(ErrorReason, String),
        /// A part starts or ends with an underscore.
        #[error("package {0} `{1}` starts or ends with an underscore")]
        PrePostfixUnderscore(ErrorReason, String),
        /// A part is shorter than 3 or longer than 32 characters.
        #[error("package {0} `{1}` is not within 3-32 characters long")]
        InvalidLength(ErrorReason, String),
    }
}

View file

@ -1,423 +0,0 @@
use std::{
fmt::Debug,
hash::Hash,
{fmt::Display, str::FromStr},
};
use cfg_if::cfg_if;
use serde::{
de::{IntoDeserializer, Visitor},
Deserialize, Serialize,
};
use thiserror::Error;
/// A package name
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct StandardPackageName(String, String);

/// An error that occurred while validating a package name part (scope or name)
#[derive(Debug, Error)]
pub enum StandardPackageNameValidationError {
    /// The package name part is empty
    #[error("package name part cannot be empty")]
    EmptyPart,
    /// The package name part contains invalid characters (only lowercase ASCII characters, numbers, and underscores are allowed)
    #[error("package name {0} part can only contain lowercase ASCII characters, numbers, and underscores")]
    InvalidPart(String),
    /// The package name part is too long (it cannot be longer than 24 characters)
    #[error("package name {0} part cannot be longer than 24 characters")]
    PartTooLong(String),
}

/// Validates a package name part (scope or name): non-empty, drawn from
/// `a-z`, `0-9`, and `_`, and at most 24 characters long.
pub fn validate_part(part: &str) -> Result<(), StandardPackageNameValidationError> {
    if part.is_empty() {
        return Err(StandardPackageNameValidationError::EmptyPart);
    }

    let is_allowed = |c: char| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '_';
    if part.chars().any(|c| !is_allowed(c)) {
        return Err(StandardPackageNameValidationError::InvalidPart(
            part.to_string(),
        ));
    }

    if part.len() > 24 {
        return Err(StandardPackageNameValidationError::PartTooLong(
            part.to_string(),
        ));
    }

    Ok(())
}
/// A wally package name
#[cfg(feature = "wally")]
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct WallyPackageName(String, String);

/// An error that occurred while validating a wally package name part (scope or name)
#[cfg(feature = "wally")]
#[derive(Debug, Error)]
pub enum WallyPackageNameValidationError {
    /// The package name part is empty
    #[error("wally package name part cannot be empty")]
    EmptyPart,
    /// The package name part contains invalid characters (only lowercase ASCII characters, numbers, and dashes are allowed)
    #[error("wally package name {0} part can only contain lowercase ASCII characters, numbers, and dashes")]
    InvalidPart(String),
    /// The package name part is too long (it cannot be longer than 64 characters)
    #[error("wally package name {0} part cannot be longer than 64 characters")]
    PartTooLong(String),
}

/// Validates a wally package name part (scope or name): non-empty, drawn
/// from `a-z`, `0-9`, and `-`, and at most 64 characters long.
#[cfg(feature = "wally")]
pub fn validate_wally_part(part: &str) -> Result<(), WallyPackageNameValidationError> {
    if part.is_empty() {
        return Err(WallyPackageNameValidationError::EmptyPart);
    }

    let is_allowed = |c: char| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '-';
    if part.chars().any(|c| !is_allowed(c)) {
        return Err(WallyPackageNameValidationError::InvalidPart(
            part.to_string(),
        ));
    }

    if part.len() > 64 {
        return Err(WallyPackageNameValidationError::PartTooLong(
            part.to_string(),
        ));
    }

    Ok(())
}
/// An error that occurred while parsing an escaped package name
///
/// Generic over `E`, the part-validation error type of the concrete name.
#[derive(Debug, Error)]
pub enum EscapedPackageNameError<E> {
    /// This package name is missing a prefix
    #[error("package name is missing prefix {0}")]
    MissingPrefix(String),
    /// This is not a valid escaped package name
    #[error("package name {0} is not in the format `scope{ESCAPED_SEPARATOR}name`")]
    Invalid(String),
    /// The package name is invalid
    #[error(transparent)]
    InvalidName(#[from] E),
}

/// An error that occurred while parsing a package name
#[derive(Debug, Error)]
pub enum FromStrPackageNameParseError<E> {
    /// This is not a valid package name
    #[error("package name {0} is not in the format `scope{SEPARATOR}name`")]
    Invalid(String),
    /// The package name is invalid
    #[error(transparent)]
    InvalidPart(#[from] E),
}

// `/` separates scope and name in the display form; `+` is used in the
// escaped (filesystem-safe) form produced by `escaped()`.
const SEPARATOR: char = '/';
const ESCAPED_SEPARATOR: char = '+';
/// Generates the shared package-name boilerplate for a `(scope, name)` tuple
/// struct: validated constructor, escaped parsing/formatting, accessors, and
/// `Display`/`FromStr`/serde implementations.
///
/// `$Name` - the tuple struct; `$Error` - its part-validation error type;
/// `$validate` - the per-part validator function; `$prefix` - the literal
/// prefix distinguishing this name kind (e.g. `"wally#"`, `""` for standard).
macro_rules! name_impl {
    ($Name:ident, $Error:ident, $validate:expr, $prefix:expr) => {
        impl $Name {
            /// Creates a new package name, validating both parts
            pub fn new(scope: &str, name: &str) -> Result<Self, $Error> {
                $validate(scope)?;
                $validate(name)?;

                Ok(Self(scope.to_string(), name.to_string()))
            }

            /// Parses an escaped package name (`{prefix}scope+name`);
            /// the prefix is mandatory here, unlike in `from_str`
            pub fn from_escaped(s: &str) -> Result<Self, EscapedPackageNameError<$Error>> {
                if !s.starts_with($prefix) {
                    return Err(EscapedPackageNameError::MissingPrefix($prefix.to_string()));
                }

                let (scope, name) = &s[$prefix.len()..]
                    .split_once(ESCAPED_SEPARATOR)
                    .ok_or_else(|| EscapedPackageNameError::Invalid(s.to_string()))?;

                Ok(Self::new(scope, name)?)
            }

            /// Gets the scope of the package name
            pub fn scope(&self) -> &str {
                &self.0
            }

            /// Gets the name of the package name
            pub fn name(&self) -> &str {
                &self.1
            }

            /// Gets the escaped form (for use in file names, etc.) of the package name
            pub fn escaped(&self) -> String {
                format!("{}{}{ESCAPED_SEPARATOR}{}", $prefix, self.0, self.1)
            }

            /// Gets the parts of the package name
            pub fn parts(&self) -> (&str, &str) {
                (&self.0, &self.1)
            }

            /// Returns the prefix for this package name
            pub fn prefix() -> &'static str {
                $prefix
            }
        }

        impl Display for $Name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, "{}{}{SEPARATOR}{}", $prefix, self.0, self.1)
            }
        }

        impl FromStr for $Name {
            type Err = FromStrPackageNameParseError<$Error>;

            // Unlike `from_escaped`, the prefix is optional when parsing
            // the display form.
            fn from_str(s: &str) -> Result<Self, Self::Err> {
                let len = if s.starts_with($prefix) {
                    $prefix.len()
                } else {
                    0
                };

                let parts: Vec<&str> = s[len..].split(SEPARATOR).collect();
                if parts.len() != 2 {
                    return Err(FromStrPackageNameParseError::Invalid(s.to_string()));
                }

                Ok($Name::new(parts[0], parts[1])?)
            }
        }

        impl Serialize for $Name {
            // Serialized as the display string (including the prefix).
            fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
                serializer.serialize_str(&self.to_string())
            }
        }

        impl<'de> Deserialize<'de> for $Name {
            fn deserialize<D: serde::Deserializer<'de>>(
                deserializer: D,
            ) -> Result<$Name, D::Error> {
                struct NameVisitor;

                impl<'de> Visitor<'de> for NameVisitor {
                    type Value = $Name;

                    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                        write!(
                            formatter,
                            "a string in the format `{}scope{SEPARATOR}name`",
                            $prefix
                        )
                    }

                    fn visit_str<E: serde::de::Error>(self, v: &str) -> Result<Self::Value, E> {
                        v.parse().map_err(E::custom)
                    }
                }

                deserializer.deserialize_str(NameVisitor)
            }
        }
    };
}
/// A package name
///
/// Serialized untagged: the concrete variant's own serialization is used.
#[derive(Serialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[serde(untagged)]
pub enum PackageName {
    /// A standard package name
    Standard(StandardPackageName),
    /// A wally package name
    #[cfg(feature = "wally")]
    Wally(WallyPackageName),
}

impl PackageName {
    // Each accessor simply dispatches to the underlying name type.

    /// Gets the scope of the package name
    pub fn scope(&self) -> &str {
        match self {
            PackageName::Standard(name) => name.scope(),
            #[cfg(feature = "wally")]
            PackageName::Wally(name) => name.scope(),
        }
    }

    /// Gets the name of the package name
    pub fn name(&self) -> &str {
        match self {
            PackageName::Standard(name) => name.name(),
            #[cfg(feature = "wally")]
            PackageName::Wally(name) => name.name(),
        }
    }

    /// Gets the escaped form (for use in file names, etc.) of the package name
    pub fn escaped(&self) -> String {
        match self {
            PackageName::Standard(name) => name.escaped(),
            #[cfg(feature = "wally")]
            PackageName::Wally(name) => name.escaped(),
        }
    }

    /// Gets the parts of the package name
    pub fn parts(&self) -> (&str, &str) {
        match self {
            PackageName::Standard(name) => name.parts(),
            #[cfg(feature = "wally")]
            PackageName::Wally(name) => name.parts(),
        }
    }

    /// Returns the prefix for this package name
    pub fn prefix(&self) -> &'static str {
        match self {
            PackageName::Standard(_) => StandardPackageName::prefix(),
            #[cfg(feature = "wally")]
            PackageName::Wally(_) => WallyPackageName::prefix(),
        }
    }
}
impl Display for PackageName {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            PackageName::Standard(name) => write!(f, "{name}"),
            #[cfg(feature = "wally")]
            PackageName::Wally(name) => write!(f, "{name}"),
        }
    }
}

impl From<StandardPackageName> for PackageName {
    fn from(name: StandardPackageName) -> Self {
        PackageName::Standard(name)
    }
}

#[cfg(feature = "wally")]
impl From<WallyPackageName> for PackageName {
    fn from(name: WallyPackageName) -> Self {
        PackageName::Wally(name)
    }
}

#[cfg(feature = "wally")]
impl From<WallyPackageName> for StandardPackageName {
    // Lossy conversion: each part is truncated to the 24-character standard
    // limit and `-` becomes `_`. Byte slicing is safe because wally parts
    // are validated to be ASCII (see `validate_wally_part`).
    fn from(name: WallyPackageName) -> Self {
        let (scope, name) = name.parts();

        StandardPackageName(
            scope[..scope.len().min(24)].replace('-', "_"),
            name[..name.len().min(24)].replace('-', "_"),
        )
    }
}
// Standard pesde names: `a-z`, `0-9`, `_`; no prefix.
name_impl!(
    StandardPackageName,
    StandardPackageNameValidationError,
    validate_part,
    ""
);

// Wally names: `a-z`, `0-9`, `-`; distinguished by the `wally#` prefix.
#[cfg(feature = "wally")]
name_impl!(
    WallyPackageName,
    WallyPackageNameValidationError,
    validate_wally_part,
    "wally#"
);
impl<'de> Deserialize<'de> for PackageName {
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let s = String::deserialize(deserializer)?;

        // With the `wally` feature, a `wally#` prefix routes the string to
        // the wally parser; everything else is a standard name.
        cfg_if! {
            if #[cfg(feature = "wally")] {
                if s.starts_with(WallyPackageName::prefix()) {
                    return Ok(PackageName::Wally(
                        WallyPackageName::deserialize(s.into_deserializer())?,
                    ));
                }
            }
        }

        Ok(PackageName::Standard(StandardPackageName::deserialize(
            s.into_deserializer(),
        )?))
    }
}
/// An error that occurred while parsing a package name
#[derive(Debug, Error)]
pub enum FromStrPackageNameError {
    /// Error parsing the package name as a standard package name
    #[error("error parsing standard package name")]
    Standard(#[from] FromStrPackageNameParseError<StandardPackageNameValidationError>),

    /// Error parsing the package name as a wally package name
    #[cfg(feature = "wally")]
    #[error("error parsing wally package name")]
    Wally(#[from] FromStrPackageNameParseError<WallyPackageNameValidationError>),
}

impl FromStr for PackageName {
    type Err = FromStrPackageNameError;

    // Mirrors the `Deserialize` impl: the `wally#` prefix selects the wally
    // parser when the feature is enabled.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        cfg_if! {
            if #[cfg(feature = "wally")] {
                if s.starts_with(WallyPackageName::prefix()) {
                    return Ok(PackageName::Wally(WallyPackageName::from_str(s)?));
                }
            }
        }

        Ok(PackageName::Standard(StandardPackageName::from_str(s)?))
    }
}

/// An error that occurred while parsing an escaped package name
#[derive(Debug, Error)]
pub enum FromEscapedStrPackageNameError {
    /// Error parsing the package name as a standard package name
    #[error("error parsing standard package name")]
    Standard(#[from] EscapedPackageNameError<StandardPackageNameValidationError>),

    /// Error parsing the package name as a wally package name
    #[cfg(feature = "wally")]
    #[error("error parsing wally package name")]
    Wally(#[from] EscapedPackageNameError<WallyPackageNameValidationError>),
}

impl PackageName {
    /// Like `from_str`, but for escaped package names
    pub fn from_escaped_str(s: &str) -> Result<Self, FromEscapedStrPackageNameError> {
        cfg_if! {
            if #[cfg(feature = "wally")] {
                if s.starts_with(WallyPackageName::prefix()) {
                    return Ok(PackageName::Wally(WallyPackageName::from_escaped(s)?));
                }
            }
        }

        Ok(PackageName::Standard(StandardPackageName::from_escaped(s)?))
    }
}

View file

@ -1,214 +0,0 @@
use std::{
fs::{read, read_dir},
path::{Path, PathBuf},
};
use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature};
use log::debug;
use semver::Version;
use thiserror::Error;
use crate::{
dependencies::resolution::RootLockfileNode,
package_name::{FromEscapedStrPackageNameError, PackageName},
project::Project,
PATCHES_FOLDER,
};
/// Creates the commit signature used for all commits in patch repositories.
fn make_signature<'a>() -> Result<Signature<'a>, git2::Error> {
    Signature::now(
        env!("CARGO_PKG_NAME"),
        concat!(env!("CARGO_PKG_NAME"), "@localhost"),
    )
}

/// Sets up a patches repository in the specified directory
pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> {
    let repo = Repository::init(&dir)?;

    {
        let signature = make_signature()?;
        let mut index = repo.index()?;
        // Stage everything so the initial "original" commit captures the
        // pristine package contents; patches are later diffed/applied
        // against this commit.
        index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)?;
        index.write()?;

        let oid = index.write_tree()?;
        let tree = repo.find_tree(oid)?;

        repo.commit(Some("HEAD"), &signature, &signature, "original", &tree, &[])?;
    }

    Ok(repo)
}
/// An error that occurred while creating patches
#[derive(Debug, Error)]
pub enum CreatePatchError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),

    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),

    /// An error that occurred while getting a file name
    #[error("failed to get file name from {0}")]
    FileNameFail(PathBuf),

    /// An error that occurred while stripping a prefix
    #[error("error stripping prefix {1} from path {2}")]
    StripPrefixFail(#[source] std::path::StripPrefixError, PathBuf, PathBuf),
}

/// Creates a patch for the package in the specified directory
///
/// Returns the raw bytes of a unified diff between the repository's HEAD
/// (the "original" commit made by `setup_patches_repo`) and the working
/// directory.
pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, CreatePatchError> {
    let mut patches = vec![];
    let repo = Repository::open(dir.as_ref())?;

    let original = repo.head()?.peel_to_tree()?;
    let diff = repo.diff_tree_to_workdir(Some(&original), None)?;

    diff.print(DiffFormat::Patch, |_delta, _hunk, line| {
        // Content lines carry their origin marker ('+', '-', ' ')
        // separately from `content()`, so re-prepend it to produce a valid
        // unified diff; other line kinds (headers, hunk markers) already
        // contain their full text.
        match line.origin_value() {
            DiffLineType::Context | DiffLineType::Addition | DiffLineType::Deletion => {
                let origin = line.origin();
                let mut buffer = vec![0; origin.len_utf8()];
                origin.encode_utf8(&mut buffer);
                patches.extend(buffer);
            }
            _ => {}
        }
        patches.extend(line.content());

        true
    })?;

    Ok(patches)
}
/// An error that occurred while applying patches
#[derive(Debug, Error)]
pub enum ApplyPatchesError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),

    /// An error that occurred while interacting with git
    #[error("error interacting with git")]
    Git(#[from] git2::Error),

    /// An error that occurred while getting a file name
    #[error("failed to get file name from {0}")]
    FileNameFail(PathBuf),

    /// An error that occurred while converting a path to a string
    #[error("failed to convert path to string")]
    ToStringFail,

    /// An error that occurred because a patch name was malformed
    #[error("malformed patch name {0}")]
    MalformedPatchName(String),

    /// An error that occurred while parsing a package name
    #[error("failed to parse package name {0}")]
    PackageNameParse(#[from] FromEscapedStrPackageNameError),

    /// An error that occurred while getting a file stem
    #[error("failed to get file stem")]
    FileStemFail,

    /// An error that occurred while reading a file
    #[error("failed to read file")]
    ReadFail,

    /// An error that occurred because a package was not found in the dependencies
    #[error("package {0} not found in the lockfile")]
    PackageNotFound(PackageName),

    /// An error that occurred because a version was not found for a package
    #[error("version {0} not found for package {1}")]
    VersionNotFound(Version, PackageName),

    /// An error that occurred while parsing a version
    #[error("failed to parse version")]
    VersionParse(#[from] semver::Error),

    /// An error that occurred while stripping a prefix
    #[error("strip prefix error")]
    StripPrefixFail(#[from] std::path::StripPrefixError),
}

impl Project {
    /// Applies patches for the project
    ///
    /// Reads every `<escaped package name>@<version>.patch` file from the
    /// project's patches folder and applies it to the corresponding
    /// downloaded package, committing the result.
    pub fn apply_patches(&self, lockfile: &RootLockfileNode) -> Result<(), ApplyPatchesError> {
        let patches_dir = self.path().join(PATCHES_FOLDER);
        if !patches_dir.exists() {
            return Ok(());
        }

        for file in read_dir(&patches_dir)? {
            let file = file?;
            if !file.file_type()?.is_file() {
                continue;
            }

            let path = file.path();

            let file_name = path
                .file_name()
                .ok_or_else(|| ApplyPatchesError::FileNameFail(path.clone()))?;
            let file_name = file_name.to_str().ok_or(ApplyPatchesError::ToStringFail)?;

            // Patch files are named `<escaped name>@<version>[.patch]`.
            let (package_name, version) = file_name
                .strip_suffix(".patch")
                .unwrap_or(file_name)
                .split_once('@')
                .ok_or_else(|| ApplyPatchesError::MalformedPatchName(file_name.to_string()))?;

            let package_name = PackageName::from_escaped_str(package_name)?;

            let version = Version::parse(version)?;

            // Look the package up in the lockfile to find where its source
            // was downloaded.
            let resolved_pkg = lockfile
                .children
                .get(&package_name)
                .ok_or_else(|| ApplyPatchesError::PackageNotFound(package_name.clone()))?
                .get(&version)
                .ok_or_else(|| {
                    ApplyPatchesError::VersionNotFound(version.clone(), package_name.clone())
                })?;

            debug!("resolved package {package_name}@{version} to {resolved_pkg}");

            let (_, source_path) = resolved_pkg.directory(self.path());
            let diff = Diff::from_buffer(&read(&path)?)?;

            // Reuse the package's patches repo if it already exists,
            // otherwise initialize one with an "original" commit to apply
            // against.
            let repo = match Repository::open(&source_path) {
                Ok(repo) => repo,
                Err(_) => setup_patches_repo(&source_path)?,
            };

            repo.apply(&diff, ApplyLocation::Both, None)?;

            // Commit the patched state so later diffs see the applied
            // patch as the baseline.
            let mut index = repo.index()?;
            index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)?;
            index.write()?;

            let signature = make_signature()?;
            let tree_id = index.write_tree()?;
            let tree = repo.find_tree(tree_id)?;
            let parent = repo.head()?.peel_to_commit()?;

            repo.commit(
                Some("HEAD"),
                &signature,
                &signature,
                "applied patches",
                &tree,
                &[&parent],
            )?;
        }

        Ok(())
    }
}

View file

@ -1,326 +0,0 @@
use log::{error, warn};
use std::{
collections::HashMap,
fmt::Debug,
fs::{read, File},
path::{Path, PathBuf},
};
use thiserror::Error;
use url::Url;
use crate::{
dependencies::{resolution::RootLockfileNode, DownloadError, UrlResolveError},
index::Index,
linking_file::LinkingDependenciesError,
manifest::{update_sync_tool_files, Manifest, ManifestReadError},
LOCKFILE_FILE_NAME,
};
/// A map of indices, keyed by index name; must contain an entry for
/// `DEFAULT_INDEX_NAME` (enforced by `Project::new`).
pub type Indices = HashMap<String, Box<dyn Index>>;

/// A pesde project
#[derive(Debug)]
pub struct Project {
    path: PathBuf,       // project root (contains the manifest)
    cache_path: PathBuf, // cache directory for downloaded data
    indices: Indices,
    manifest: Manifest,
    pub(crate) reqwest_client: reqwest::blocking::Client,
}

/// Options for installing a project
pub struct InstallOptions {
    locked: bool,
    auto_download: bool,
    lockfile: Option<RootLockfileNode>,
}

impl Default for InstallOptions {
    fn default() -> Self {
        Self {
            locked: false,
            auto_download: true,
            lockfile: None,
        }
    }
}

impl InstallOptions {
    /// Creates a new set of install options (uses the Default implementation)
    pub fn new() -> Self {
        Self::default()
    }

    // NOTE: these builders take `&self` and clone the lockfile; `..*self`
    // then copies the remaining `Copy` (bool) fields.

    /// Makes the installation to use the lockfile, and ensure that the lockfile is up-to-date
    pub fn locked(&self, locked: bool) -> Self {
        Self {
            locked,
            lockfile: self.lockfile.clone(),
            ..*self
        }
    }

    /// Makes the installation to automatically download the dependencies
    /// Having this set to false is only useful if you want to download the dependencies yourself. An example of this is the CLI's progress bar
    pub fn auto_download(&self, auto_download: bool) -> Self {
        Self {
            auto_download,
            lockfile: self.lockfile.clone(),
            ..*self
        }
    }

    /// Makes the installation to use the given lockfile
    /// Having this set to Some is only useful if you're using auto_download = false
    pub fn lockfile(&self, lockfile: RootLockfileNode) -> Self {
        Self {
            lockfile: Some(lockfile),
            ..*self
        }
    }
}
/// An error that occurred while reading the lockfile
#[derive(Debug, Error)]
pub enum ReadLockfileError {
    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),

    /// An error that occurred while deserializing the lockfile
    #[error("error deserializing lockfile")]
    LockfileDeser(#[source] serde_yaml::Error),
}

/// An error that occurred while installing (downloading) a project
#[derive(Debug, Error)]
pub enum InstallProjectError {
    /// An error that occurred while resolving the dependency graph
    #[error("failed to resolve dependency graph")]
    ResolveGraph(#[from] crate::dependencies::resolution::ResolveError),

    /// An error that occurred while downloading a package
    #[error("failed to download package")]
    DownloadPackage(#[from] DownloadError),

    /// An error that occurred while applying patches
    #[error("error applying patches")]
    ApplyPatches(#[from] crate::patches::ApplyPatchesError),

    /// An error that occurred while linking dependencies
    #[error("failed to link dependencies")]
    Linking(#[from] LinkingDependenciesError),

    /// An error that occurred while interacting with the file system
    #[error("error interacting with the file system")]
    Io(#[from] std::io::Error),

    /// An error that occurred while writing the lockfile
    #[error("failed to write lockfile")]
    LockfileSer(#[source] serde_yaml::Error),

    /// An error that occurred while resolving the url of a package
    #[error("failed to resolve package URL")]
    UrlResolve(#[from] UrlResolveError),

    /// An error that occurred while reading the lockfile
    #[error("failed to read lockfile")]
    ReadLockfile(#[from] ReadLockfileError),
}
/// The name of the default index to use
pub const DEFAULT_INDEX_NAME: &str = "default";

/// Looks an index up by name, falling back (with a warning) to the default
/// index when the requested one is missing. Panics if the default index is
/// absent, which `Project::new` rules out.
pub(crate) fn get_index<'a>(indices: &'a Indices, index_name: Option<&str>) -> &'a dyn Index {
    let wanted = index_name.unwrap_or(DEFAULT_INDEX_NAME);

    if let Some(index) = indices.get(wanted) {
        return index.as_ref();
    }

    warn!(
        "index `{}` not found, using default index",
        index_name.unwrap_or("<not provided>")
    );

    indices.get(DEFAULT_INDEX_NAME).unwrap().as_ref()
}
/// Finds the index registered for the given URL, falling back to the
/// default index when none matches.
pub(crate) fn get_index_by_url<'a>(indices: &'a Indices, url: &Url) -> &'a dyn Index {
    for index in indices.values() {
        if index.url() == url {
            return index.as_ref();
        }
    }

    get_index(indices, None)
}
#[cfg(feature = "wally")]
pub(crate) fn get_wally_index<'a>(
    indices: &'a mut Indices,
    url: &Url,
    path: Option<&Path>,
) -> Result<&'a crate::index::WallyIndex, crate::index::RefreshError> {
    // Wally indices are created lazily and memoized under their URL string.
    // The default index donates its auth token and credentials callback.
    if !indices.contains_key(url.as_str()) {
        let default_index = indices.get(DEFAULT_INDEX_NAME).unwrap();
        let default_token = default_index.registry_auth_token().map(|t| t.to_string());
        let default_credentials_fn = default_index.credentials_fn().cloned();

        let index = crate::index::WallyIndex::new(
            url.clone(),
            default_token,
            path.expect("index should already exist by now"),
            default_credentials_fn,
        );

        // Refresh before inserting so a broken index is never cached.
        match index.refresh() {
            Ok(_) => {
                indices.insert(url.as_str().to_string(), Box::new(index));
            }
            Err(e) => {
                error!("failed to refresh wally index: {e}");
                return Err(e);
            }
        }
    }

    // The entry is guaranteed present now; downcast the boxed `dyn Index`
    // back to the concrete `WallyIndex` (it was inserted as one above).
    Ok(indices
        .get(url.as_str())
        .unwrap()
        .as_any()
        .downcast_ref()
        .unwrap())
}

/// An error that occurred while creating a new project
#[derive(Debug, Error)]
pub enum NewProjectError {
    /// A default index was not provided
    #[error("default index not provided")]
    DefaultIndexNotProvided,
}

/// An error that occurred while creating a project from a path
#[derive(Debug, Error)]
pub enum ProjectFromPathError {
    /// An error that occurred while reading the manifest
    #[error("error reading manifest")]
    ManifestRead(#[from] ManifestReadError),

    /// An error that occurred while creating the project
    #[error("error creating project")]
    NewProject(#[from] NewProjectError),
}
impl Project {
    /// Creates a new project
    pub fn new<P: AsRef<Path>, Q: AsRef<Path>>(
        path: P,
        cache_path: Q,
        indices: Indices,
        manifest: Manifest,
    ) -> Result<Self, NewProjectError> {
        // A default index is required so index lookups always have a
        // fallback (see `get_index`).
        if !indices.contains_key(DEFAULT_INDEX_NAME) {
            return Err(NewProjectError::DefaultIndexNotProvided);
        }

        Ok(Self {
            path: path.as_ref().to_path_buf(),
            cache_path: cache_path.as_ref().to_path_buf(),
            indices,
            manifest,
            reqwest_client: reqwest::blocking::ClientBuilder::new()
                .user_agent(concat!(
                    env!("CARGO_PKG_NAME"),
                    "/",
                    env!("CARGO_PKG_VERSION")
                ))
                .build()
                .unwrap(),
        })
    }

    /// Creates a new project from a path (manifest will be read from the path)
    pub fn from_path<P: AsRef<Path>, Q: AsRef<Path>>(
        path: P,
        cache_path: Q,
        indices: Indices,
    ) -> Result<Self, ProjectFromPathError> {
        let manifest = Manifest::from_path(path.as_ref())?;

        Ok(Self::new(path, cache_path, indices, manifest)?)
    }

    /// Returns the indices of the project
    pub fn indices(&self) -> &HashMap<String, Box<dyn Index>> {
        &self.indices
    }

    #[cfg(feature = "wally")]
    pub(crate) fn indices_mut(&mut self) -> &mut HashMap<String, Box<dyn Index>> {
        &mut self.indices
    }

    /// Returns the manifest of the project
    pub fn manifest(&self) -> &Manifest {
        &self.manifest
    }

    /// Returns the cache directory of the project
    pub fn cache_dir(&self) -> &Path {
        &self.cache_path
    }

    /// Returns the path of the project
    pub fn path(&self) -> &Path {
        &self.path
    }

    /// Returns the lockfile of the project, or `None` if it does not exist
    pub fn lockfile(&self) -> Result<Option<RootLockfileNode>, ReadLockfileError> {
        let lockfile_path = self.path.join(LOCKFILE_FILE_NAME);

        Ok(if lockfile_path.exists() {
            let lockfile_contents = read(&lockfile_path)?;
            let lockfile: RootLockfileNode = serde_yaml::from_slice(&lockfile_contents)
                .map_err(ReadLockfileError::LockfileDeser)?;

            Some(lockfile)
        } else {
            None
        })
    }

    /// Downloads the project's dependencies, applies patches, and links the dependencies
    pub fn install(&mut self, install_options: InstallOptions) -> Result<(), InstallProjectError> {
        let old_lockfile = self.lockfile()?;

        // Use the caller-provided lockfile if any, otherwise resolve a
        // fresh dependency graph from the manifest.
        let lockfile = match install_options.lockfile {
            Some(map) => map,
            None => {
                let manifest = self.manifest.clone();

                manifest.dependency_graph(self, install_options.locked)?
            }
        };

        if install_options.auto_download {
            self.download(&lockfile)?.wait()?;
        }

        self.apply_patches(&lockfile)?;

        self.link_dependencies(&lockfile)?;

        // In locked mode the on-disk lockfile is treated as authoritative,
        // so only write it back when not locked.
        if !install_options.locked {
            serde_yaml::to_writer(File::create(self.path.join(LOCKFILE_FILE_NAME))?, &lockfile)
                .map_err(InstallProjectError::LockfileSer)?;
        }

        // Keep sync tool files up to date when the project name changed
        // (or when no lockfile existed before).
        if !old_lockfile.is_some_and(|old| old.name == lockfile.name) {
            update_sync_tool_files(self.path(), lockfile.name.name().to_string())?;
        }

        Ok(())
    }
}

79
src/source/mod.rs Normal file
View file

@ -0,0 +1,79 @@
use std::{collections::BTreeMap, fmt::Debug, path::Path};
use semver::Version;
use serde::{Deserialize, Serialize};
use crate::Project;
pub mod pesde;
/// How a dependency is declared: carries the alias it will be linked under.
pub trait DependencySpecifier: Debug {
/// The alias this dependency is linked under
fn alias(&self) -> &str;
/// Replaces the alias
fn set_alias(&mut self, alias: String);
}
/// Marker trait for references to a concrete, resolved package version.
pub trait PackageRef: Debug {}
/// Hashes any `Hash` value with the std `DefaultHasher` and renders the
/// 64-bit result as a decimal string (used e.g. for index directory names).
pub(crate) fn hash<S: std::hash::Hash>(value: &S) -> String {
use std::{collections::hash_map::DefaultHasher, hash::Hasher};

let mut state = DefaultHasher::new();
std::hash::Hash::hash(value, &mut state);
format!("{}", state.finish())
}
/// A source of packages: can refresh its local data, resolve a specifier to
/// the set of matching versions, and download a concrete package.
pub trait PackageSource: Debug {
/// Reference type identifying one resolved package version
type Ref: PackageRef;
/// Specifier type this source understands
type Specifier: DependencySpecifier;
type RefreshError: std::error::Error;
type ResolveError: std::error::Error;
type DownloadError: std::error::Error;
/// Refreshes the source's local data; defaults to a no-op for sources
/// with nothing to refresh.
fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
Ok(())
}
/// Resolves `specifier` to every matching version, mapped to its ref.
fn resolve(
&self,
specifier: &Self::Specifier,
project: &Project,
) -> Result<BTreeMap<Version, Self::Ref>, Self::ResolveError>;
/// Downloads the package identified by `pkg_ref` into `destination`.
fn download(
&self,
pkg_ref: &Self::Ref,
destination: &Path,
project: &Project,
) -> Result<(), Self::DownloadError>;
}
/// Any dependency specifier understood by pesde.
///
/// `#[serde(untagged)]`: variants are told apart purely by their shape when
/// (de)serializing, so variant field sets must stay distinguishable.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]
#[serde(untagged)]
pub enum DependencySpecifiers {
Pesde(pesde::PesdeDependencySpecifier),
}
impl DependencySpecifiers {
/// Dispatches to the variant's [`DependencySpecifier::alias`].
pub fn alias(&self) -> &str {
match self {
DependencySpecifiers::Pesde(spec) => spec.alias(),
}
}
/// Dispatches to the variant's [`DependencySpecifier::set_alias`].
pub fn set_alias(&mut self, alias: String) {
match self {
DependencySpecifiers::Pesde(spec) => spec.set_alias(alias),
}
}
}
/// Any package reference understood by pesde.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum PackageRefs {
Pesde(pesde::PesdePackageRef),
}
/// Any package source understood by pesde.
#[derive(Debug, Eq, PartialEq, Hash)]
pub enum PackageSources {
Pesde(pesde::PesdePackageSource),
}

588
src/source/pesde.rs Normal file
View file

@ -0,0 +1,588 @@
use std::{collections::BTreeMap, fmt::Debug, hash::Hash, path::Path};
use gix::remote::Direction;
use semver::{Version, VersionReq};
use serde::{Deserialize, Serialize};
use crate::{
authenticate_conn,
manifest::Target,
names::PackageName,
source::{hash, DependencySpecifier, DependencySpecifiers, PackageRef, PackageSource},
Project, REQWEST_CLIENT,
};
/// A dependency on a package in a pesde index: a package name plus a semver
/// requirement, linked under `alias`.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]
pub struct PesdeDependencySpecifier {
pub name: PackageName,
pub version: VersionReq,
// empty string means "no alias set"; omitted from serialized output
#[serde(default, skip_serializing_if = "String::is_empty")]
pub alias: String,
}
impl DependencySpecifier for PesdeDependencySpecifier {
fn alias(&self) -> &str {
self.alias.as_str()
}
fn set_alias(&mut self, alias: String) {
self.alias = alias;
}
}
/// A reference to one concrete version of a package in a pesde index.
#[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)]
pub struct PesdePackageRef {
name: PackageName,
version: Version,
}
impl PackageRef for PesdePackageRef {}
impl Ord for PesdePackageRef {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
// Order primarily by version, falling back to the package name so the
// ordering is consistent with the derived `Eq`: comparing versions
// alone reported `Equal` for refs to *different* packages with the
// same version, which violates `Ord`'s contract (cmp == Equal must
// coincide with ==) and can corrupt ordered collections.
self.version
.cmp(&other.version)
.then_with(|| self.name.cmp(&other.name))
}
}
impl PartialOrd for PesdePackageRef {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
/// A package source backed by a git repository containing a pesde index.
#[derive(Debug, Hash, PartialEq, Eq)]
pub struct PesdePackageSource {
repo_url: gix::Url,
}
/// File inside each scope directory that lists scope owners — not a package.
const OWNERS_FILE: &str = "owners.yaml";
impl PesdePackageSource {
/// Creates a source for the pesde index hosted at `repo_url`.
pub fn new(repo_url: gix::Url) -> Self {
Self { repo_url }
}
/// Directory holding this index's bare clone, under the project's data
/// dir. The name is the hash of this source — since the struct contains
/// only the URL, effectively the hash of `repo_url`.
pub fn path(&self, project: &Project) -> std::path::PathBuf {
project.data_dir.join("indices").join(hash(self))
}
/// Resolves `repo` (the index's bare clone) to the tree of the most
/// recently fetched commit: default remote -> first fetch refspec ->
/// matching local ref -> fully peeled to a tree.
pub(crate) fn tree<'a>(
&'a self,
repo: &'a gix::Repository,
) -> Result<gix::Tree, Box<errors::TreeError>> {
// this is a bare repo, so this is the actual path
let path = repo.path().to_path_buf();
let remote = match repo.find_default_remote(Direction::Fetch) {
Some(Ok(remote)) => remote,
Some(Err(e)) => return Err(Box::new(errors::TreeError::GetDefaultRemote(path, e))),
None => {
return Err(Box::new(errors::TreeError::NoDefaultRemote(path)));
}
};
let refspec = match remote.refspecs(Direction::Fetch).first() {
Some(head) => head,
None => return Err(Box::new(errors::TreeError::NoRefSpecs(path))),
};
let spec_ref = refspec.to_ref();
// turn the refspec's local side (e.g. refs/remotes/origin/*) into a
// concrete ref name by substituting the first branch ("main" if none)
let local_ref = match spec_ref.local() {
Some(local) => local
.to_string()
.replace('*', repo.branch_names().first().unwrap_or(&"main")),
None => return Err(Box::new(errors::TreeError::NoLocalRefSpec(path))),
};
let reference = match repo.find_reference(&local_ref) {
Ok(reference) => reference,
Err(e) => {
return Err(Box::new(errors::TreeError::NoReference(
local_ref.to_string(),
e,
)))
}
};
let reference_name = reference.name().as_bstr().to_string();
let id = match reference.into_fully_peeled_id() {
Ok(id) => id,
Err(e) => return Err(Box::new(errors::TreeError::CannotPeel(reference_name, e))),
};
let id_str = id.to_string();
let object = match id.object() {
Ok(object) => object,
Err(e) => {
return Err(Box::new(errors::TreeError::CannotConvertToObject(
id_str, e,
)))
}
};
match object.peel_to_tree() {
Ok(tree) => Ok(tree),
Err(e) => Err(Box::new(errors::TreeError::CannotPeelToTree(id_str, e))),
}
}
/// Reads one file from the index tree. `file_path` is a sequence of path
/// components; returns `Ok(None)` when the entry does not exist.
pub(crate) fn read_file<
I: IntoIterator<Item = P> + Clone,
P: ToString + PartialEq<gix::bstr::BStr>,
>(
&self,
file_path: I,
project: &Project,
) -> Result<Option<Vec<u8>>, Box<errors::ReadFile>> {
let path = self.path(project);
let repo = match gix::open(&path) {
Ok(repo) => repo,
Err(e) => return Err(Box::new(errors::ReadFile::Open(path, e))),
};
let tree = match self.tree(&repo) {
Ok(tree) => tree,
Err(e) => return Err(Box::new(errors::ReadFile::Tree(path, e))),
};
// joined form of the components, used only for error messages
let file_path_str = file_path
.clone()
.into_iter()
.map(|s| s.to_string())
.collect::<Vec<_>>()
.join(std::path::MAIN_SEPARATOR_STR);
let mut lookup_buf = vec![];
let entry = match tree.lookup_entry(file_path, &mut lookup_buf) {
Ok(Some(entry)) => entry,
Ok(None) => return Ok(None),
Err(e) => return Err(Box::new(errors::ReadFile::Lookup(file_path_str, e))),
};
// NOTE(review): an object-conversion failure is also reported as
// `Lookup`; a dedicated variant would give clearer diagnostics
let object = match entry.object() {
Ok(object) => object,
Err(e) => return Err(Box::new(errors::ReadFile::Lookup(file_path_str, e))),
};
let blob = object.into_blob();
Ok(Some(blob.data.clone()))
}
/// Parses the index's `config.yaml`; a missing file is an error
/// ([`errors::ConfigError::Missing`]).
pub fn config(&self, project: &Project) -> Result<IndexConfig, Box<errors::ConfigError>> {
let file = self
.read_file(["config.yaml"], project)
.map_err(|e| Box::new(e.into()))?;
let bytes = match file {
Some(bytes) => bytes,
None => {
return Err(Box::new(errors::ConfigError::Missing(
self.repo_url.clone(),
)))
}
};
let config: IndexConfig = serde_yaml::from_slice(&bytes).map_err(|e| Box::new(e.into()))?;
Ok(config)
}
/// Walks the index tree (scope directories containing one YAML file per
/// package plus the owners file) and deserializes every package.
///
/// NOTE(review): each package blob is parsed here as a *single*
/// `IndexFileEntry`, while `resolve` parses the same file as a
/// `Vec<IndexFileEntry>` — one of the two looks wrong; confirm the
/// on-disk index file format.
pub fn all_packages(
&self,
project: &Project,
) -> Result<BTreeMap<PackageName, IndexFile>, Box<errors::AllPackagesError>> {
let path = self.path(project);
let repo = match gix::open(&path) {
Ok(repo) => repo,
Err(e) => return Err(Box::new(errors::AllPackagesError::Open(path, e))),
};
let tree = match self.tree(&repo) {
Ok(tree) => tree,
Err(e) => return Err(Box::new(errors::AllPackagesError::Tree(path, e))),
};
let mut packages = BTreeMap::<PackageName, IndexFile>::new();
for entry in tree.iter() {
let entry = match entry {
Ok(entry) => entry,
Err(e) => return Err(Box::new(errors::AllPackagesError::Decode(path, e))),
};
let object = match entry.object() {
Ok(object) => object,
Err(e) => return Err(Box::new(errors::AllPackagesError::Convert(path, e))),
};
// directories will be trees, and files will be blobs
if !matches!(object.kind, gix::object::Kind::Tree) {
continue;
}
let package_scope = entry.filename().to_string();
for inner_entry in object.into_tree().iter() {
let inner_entry = match inner_entry {
Ok(entry) => entry,
Err(e) => return Err(Box::new(errors::AllPackagesError::Decode(path, e))),
};
let object = match inner_entry.object() {
Ok(object) => object,
Err(e) => return Err(Box::new(errors::AllPackagesError::Convert(path, e))),
};
if !matches!(object.kind, gix::object::Kind::Blob) {
continue;
}
let package_name = inner_entry.filename().to_string();
// the owners file is scope metadata, not a package
if package_name == OWNERS_FILE {
continue;
}
let blob = object.into_blob();
let file: IndexFileEntry = match serde_yaml::from_slice(&blob.data) {
Ok(file) => file,
Err(e) => {
return Err(Box::new(errors::AllPackagesError::Deserialize(
package_name,
path,
e,
)))
}
};
// if this panics, it's an issue with the index.
let name = format!("{package_scope}/{package_name}").parse().unwrap();
packages
.entry(name)
.or_default()
.insert(file.version.clone(), file);
}
}
Ok(packages)
}
}
impl PackageSource for PesdePackageSource {
type Ref = PesdePackageRef;
type Specifier = PesdeDependencySpecifier;
type RefreshError = errors::RefreshError;
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
/// Brings the local bare clone of the index up to date: fetches into an
/// existing clone, or performs a fresh bare clone otherwise. Connections
/// are authenticated with the project's auth config.
fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
let path = self.path(project);
if path.exists() {
let repo = match gix::open(&path) {
Ok(repo) => repo,
Err(e) => return Err(Self::RefreshError::Open(path, e)),
};
let remote = match repo.find_default_remote(Direction::Fetch) {
Some(Ok(remote)) => remote,
Some(Err(e)) => return Err(Self::RefreshError::GetDefaultRemote(path, e)),
None => {
return Err(Self::RefreshError::NoDefaultRemote(path));
}
};
let mut connection = remote
.connect(Direction::Fetch)
.map_err(|e| Self::RefreshError::Connect(self.repo_url.clone(), e))?;
authenticate_conn(&mut connection, project.auth_config.clone());
connection
.prepare_fetch(gix::progress::Discard, Default::default())
.map_err(|e| Self::RefreshError::PrepareFetch(self.repo_url.clone(), e))?
// `&false.into()` — presumably the should-interrupt flag, never set
.receive(gix::progress::Discard, &false.into())
.map_err(|e| Self::RefreshError::Read(self.repo_url.clone(), e))?;
return Ok(());
}
// no clone yet: create the directory and clone bare
std::fs::create_dir_all(&path)?;
let auth_config = project.auth_config.clone();
gix::prepare_clone_bare(self.repo_url.clone(), &path)
.map_err(|e| Self::RefreshError::Clone(self.repo_url.clone(), e))?
.configure_connection(move |c| {
authenticate_conn(c, auth_config.clone());
Ok(())
})
.fetch_only(gix::progress::Discard, &false.into())
.map_err(|e| Self::RefreshError::Fetch(self.repo_url.clone(), e))?;
Ok(())
}
/// Looks up the package's file in the index and returns every published
/// version matching the specifier's requirement. A missing file yields
/// an empty map (the package simply does not exist in this index).
fn resolve(
&self,
specifier: &Self::Specifier,
project: &Project,
) -> Result<BTreeMap<Version, Self::Ref>, Self::ResolveError> {
let (scope, name) = specifier.name.as_str();
let bytes = match self.read_file([scope, name], project) {
Ok(Some(bytes)) => bytes,
Ok(None) => return Ok(BTreeMap::new()),
Err(e) => return Err(Self::ResolveError::Read(specifier.name.to_string(), e)),
};
// NOTE(review): parsed as a sequence here, but `all_packages` parses
// the same per-package file as a single entry — confirm the format
let entries: Vec<IndexFileEntry> = serde_yaml::from_slice(&bytes)
.map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;
Ok(entries
.into_iter()
.filter(|entry| specifier.version.matches(&entry.version))
.map(|entry| {
(
entry.version.clone(),
PesdePackageRef {
name: specifier.name.clone(),
version: entry.version,
},
)
})
.collect())
}
/// Downloads the package archive (gzipped tar) from the registry's
/// download endpoint and unpacks it into `destination`, sending the
/// configured pesde token as a bearer token when present.
///
/// NOTE(review): the HTTP status is never checked; an error page body
/// would be fed to the gzip decoder — consider `error_for_status()`.
fn download(
&self,
pkg_ref: &Self::Ref,
destination: &Path,
project: &Project,
) -> Result<(), Self::DownloadError> {
let config = self.config(project)?;
let (scope, name) = pkg_ref.name.as_str();
let url = config
.download()
.replace("{PACKAGE_SCOPE}", scope)
.replace("{PACKAGE_NAME}", name)
.replace("{PACKAGE_VERSION}", &pkg_ref.version.to_string());
let mut response = REQWEST_CLIENT.get(url);
if let Some(token) = &project.auth_config.pesde_token {
use secrecy::ExposeSecret;
response =
response.header("Authorization", format!("Bearer {}", token.expose_secret()));
}
let response = response.send()?;
let bytes = response.bytes()?;
let mut decoder = flate2::read::GzDecoder::new(bytes.as_ref());
let mut archive = tar::Archive::new(&mut decoder);
archive.unpack(destination)?;
Ok(())
}
}
/// The `config.yaml` at the root of a pesde index.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct IndexConfig {
/// Base URL of the registry API
pub api: url::Url,
/// Optional download URL template; see [`IndexConfig::download`]
pub download: Option<String>,
#[serde(default)]
pub git_allowed: bool,
#[serde(default)]
pub custom_registry_allowed: bool,
/// GitHub OAuth client id for this registry
pub github_oauth_client_id: String,
}
impl IndexConfig {
/// The API URL with any trailing slash removed.
pub fn api(&self) -> &str {
self.api.as_str().trim_end_matches('/')
}

/// The download URL template with `{API_URL}` already substituted.
///
/// Falls back to the default registry endpoint when the index does not
/// configure an explicit template. `{PACKAGE_SCOPE}`, `{PACKAGE_NAME}`
/// and `{PACKAGE_VERSION}` are left for the caller to fill in.
pub fn download(&self) -> String {
// `as_deref().unwrap_or(..)` avoids the original pattern of
// allocating a temporary String just to borrow it
// (`unwrap_or(&"...".to_string())`).
self.download
.as_deref()
.unwrap_or("{API_URL}/v0/packages/{PACKAGE_SCOPE}/{PACKAGE_NAME}/{PACKAGE_VERSION}")
.replace("{API_URL}", self.api())
}
}
/// One published version of a package, as stored in its index file.
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub struct IndexFileEntry {
pub version: Version,
pub target: Target,
// a missing timestamp deserializes as the current time
#[serde(default = "chrono::Utc::now")]
pub published_at: chrono::DateTime<chrono::Utc>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub dependencies: Vec<DependencySpecifiers>,
}
impl Ord for IndexFileEntry {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
// Orders by target first, then version. NOTE(review): the remaining
// fields are ignored, so two entries differing only in e.g. the
// description compare `Equal` while the derived `PartialEq` says they
// are unequal — confirm no ordered collection relies on full
// Ord/Eq consistency here.
self.target
.cmp(&other.target)
.then_with(|| self.version.cmp(&other.version))
}
}
impl PartialOrd for IndexFileEntry {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
/// All published versions of one package, keyed (and sorted) by version.
pub type IndexFile = BTreeMap<Version, IndexFileEntry>;
pub mod errors {
use std::path::PathBuf;
use thiserror::Error;
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum RefreshError {
#[error("error interacting with the filesystem")]
Io(#[from] std::io::Error),
#[error("error opening repository at {0}")]
Open(PathBuf, gix::open::Error),
#[error("no default remote found in repository at {0}")]
NoDefaultRemote(PathBuf),
#[error("error getting default remote from repository at {0}")]
GetDefaultRemote(PathBuf, gix::remote::find::existing::Error),
#[error("error connecting to remote repository at {0}")]
Connect(gix::Url, gix::remote::connect::Error),
#[error("error preparing fetch from remote repository at {0}")]
PrepareFetch(gix::Url, gix::remote::fetch::prepare::Error),
#[error("error reading from remote repository at {0}")]
Read(gix::Url, gix::remote::fetch::Error),
#[error("error cloning repository from {0}")]
Clone(gix::Url, gix::clone::Error),
#[error("error fetching repository from {0}")]
Fetch(gix::Url, gix::clone::fetch::Error),
}
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum TreeError {
#[error("error interacting with the filesystem")]
Io(#[from] std::io::Error),
#[error("error opening repository at {0}")]
Open(PathBuf, gix::open::Error),
#[error("no default remote found in repository at {0}")]
NoDefaultRemote(PathBuf),
#[error("error getting default remote from repository at {0}")]
GetDefaultRemote(PathBuf, gix::remote::find::existing::Error),
#[error("no refspecs found in repository at {0}")]
NoRefSpecs(PathBuf),
#[error("no local refspec found in repository at {0}")]
NoLocalRefSpec(PathBuf),
#[error("no reference found for local refspec {0}")]
NoReference(String, gix::reference::find::existing::Error),
#[error("cannot peel reference {0}")]
CannotPeel(String, gix::reference::peel::Error),
#[error("error converting id {0} to object")]
CannotConvertToObject(String, gix::object::find::existing::Error),
#[error("error peeling object {0} to tree")]
CannotPeelToTree(String, gix::object::peel::to_kind::Error),
}
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ReadFile {
#[error("error opening repository at {0}")]
Open(PathBuf, gix::open::Error),
#[error("error getting tree from repository at {0}")]
Tree(PathBuf, Box<TreeError>),
#[error("error looking up entry {0} in tree")]
Lookup(String, gix::object::find::existing::Error),
}
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ResolveError {
#[error("error interacting with the filesystem")]
Io(#[from] std::io::Error),
#[error("error reading file for {0}")]
Read(String, Box<ReadFile>),
#[error("error parsing file for {0}")]
Parse(String, serde_yaml::Error),
}
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ConfigError {
#[error("error reading config file")]
ReadFile(#[from] Box<ReadFile>),
#[error("error parsing config file")]
Parse(#[from] serde_yaml::Error),
#[error("missing config file for index at {0}")]
Missing(gix::Url),
}
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum AllPackagesError {
#[error("error opening repository at {0}")]
Open(PathBuf, gix::open::Error),
#[error("error getting tree from repository at {0}")]
Tree(PathBuf, Box<TreeError>),
#[error("error decoding entry in repository at {0}")]
Decode(PathBuf, gix::objs::decode::Error),
#[error("error converting entry in repository at {0}")]
Convert(PathBuf, gix::object::find::existing::Error),
#[error("error deserializing file {0} in repository at {1}")]
Deserialize(String, PathBuf, serde_yaml::Error),
}
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DownloadError {
#[error("error reading config file")]
ReadFile(#[from] Box<ConfigError>),
#[error("error downloading package")]
Download(#[from] reqwest::Error),
#[error("error unpacking package")]
Unpack(#[from] std::io::Error),
}
}

View file

@ -1,120 +0,0 @@
use std::{
any::Any,
collections::{BTreeSet, HashMap},
sync::Arc,
};
use url::Url;
use pesde::{
index::{
ConfigError, CreatePackageVersionError, CredentialsFn, Index, IndexConfig, IndexFile,
IndexFileEntry, IndexPackageError, ScopeOwners, ScopeOwnersError,
},
manifest::Manifest,
package_name::PackageName,
};
/// An in-memory implementation of the [`Index`] trait. Used for testing.
#[derive(Debug, Clone)]
pub struct InMemoryIndex {
// scope name -> (owner user-ids, package entries for that scope)
packages: HashMap<String, (BTreeSet<u64>, IndexFile)>,
url: Url,
}
impl Default for InMemoryIndex {
fn default() -> Self {
Self {
packages: HashMap::new(),
// placeholder URL; tests only need *some* value here
url: Url::parse("https://example.com").unwrap(),
}
}
}
impl InMemoryIndex {
pub fn new() -> Self {
Self::default()
}
/// Builder-style helper: registers `scope` with the given owners
/// (replacing any existing entry for that scope).
pub fn with_scope(mut self, scope: &str, owners: BTreeSet<u64>) -> Self {
self.packages
.insert(scope.to_string(), (owners, IndexFile::default()));
self
}
/// Builder-style helper: adds a package entry under `scope`, creating
/// the scope (with no owners) if needed.
pub fn with_package(mut self, scope: &str, index_file: IndexFileEntry) -> Self {
self.packages
.entry(scope.to_string())
.or_insert_with(|| (BTreeSet::new(), IndexFile::default()))
.1
.insert(index_file);
self
}
}
impl Index for InMemoryIndex {
fn scope_owners(&self, scope: &str) -> Result<Option<ScopeOwners>, ScopeOwnersError> {
Ok(self.packages.get(scope).map(|(owners, _)| owners).cloned())
}
/// Unconditionally (re)creates the scope; always reports success.
fn create_scope_for(
&mut self,
scope: &str,
owners: &ScopeOwners,
) -> Result<bool, ScopeOwnersError> {
self.packages
.insert(scope.to_string(), (owners.clone(), IndexFile::default()));
Ok(true)
}
fn package(&self, name: &PackageName) -> Result<Option<IndexFile>, IndexPackageError> {
Ok(self
.packages
.get(name.scope())
.map(|(_, file)| file.clone()))
}
/// Publishes `manifest` as a new version: creates the scope owned by
/// `uploader` on first publish, rejects uploads from non-owners.
fn create_package_version(
&mut self,
manifest: &Manifest,
uploader: &u64,
) -> Result<Option<IndexFileEntry>, CreatePackageVersionError> {
let scope = manifest.name.scope();
if let Some(owners) = self.scope_owners(scope)? {
if !owners.contains(uploader) {
return Err(CreatePackageVersionError::MissingScopeOwnership);
}
} else if !self.create_scope_for(scope, &BTreeSet::from([*uploader]))? {
return Err(CreatePackageVersionError::MissingScopeOwnership);
}
// the scope is guaranteed to exist by the branch above
let package = self.packages.get_mut(scope).unwrap();
let entry: IndexFileEntry = manifest.clone().try_into()?;
package.1.insert(entry.clone());
Ok(Some(entry))
}
/// Static config pointing at a local registry; suitable for tests.
fn config(&self) -> Result<IndexConfig, ConfigError> {
Ok(IndexConfig {
download: None,
api: "http://127.0.0.1:8080".parse().unwrap(),
github_oauth_client_id: "".to_string(),
custom_registry_allowed: false,
git_allowed: false,
})
}
fn credentials_fn(&self) -> Option<&Arc<CredentialsFn>> {
None
}
fn url(&self) -> &Url {
&self.url
}
fn as_any(&self) -> &dyn Any {
self
}
}

View file

@ -1,141 +0,0 @@
use std::collections::{BTreeMap, BTreeSet, HashMap};
use semver::Version;
use tempfile::tempdir;
use pesde::{
dependencies::{
registry::{RegistryDependencySpecifier, RegistryPackageRef},
resolution::ResolvedPackage,
DependencySpecifier, PackageRef,
},
index::Index,
manifest::{DependencyType, Manifest, Realm},
package_name::StandardPackageName,
project::{Project, DEFAULT_INDEX_NAME},
};
use prelude::*;
mod prelude;
/// Resolving registry dependencies should produce a graph containing every
/// index version matched by the specifiers — here one exact (`=`) match and
/// one strictly-greater (`>`) match on the same package.
#[test]
fn test_resolves_package() {
let dir = tempdir().unwrap();
let dir_path = dir.path().to_path_buf();
let index = InMemoryIndex::new();
let version_str = "0.1.0";
let version: Version = version_str.parse().unwrap();
let version_2_str = "0.1.1";
let version_2: Version = version_2_str.parse().unwrap();
let description = "test package";
let pkg_name = StandardPackageName::new("test", "test").unwrap();
// manifest of the package being published to the in-memory index
let pkg_manifest = Manifest {
name: pkg_name.clone(),
version: version.clone(),
exports: Default::default(),
path_style: Default::default(),
private: true,
realm: None,
indices: Default::default(),
#[cfg(feature = "wally")]
sourcemap_generator: None,
overrides: Default::default(),
dependencies: Default::default(),
peer_dependencies: Default::default(),
description: Some(description.to_string()),
license: None,
authors: None,
repository: None,
};
// identical second release, only the version differs
let mut pkg_2_manifest = pkg_manifest.clone();
pkg_2_manifest.version = version_2.clone();
let index = index
.with_scope(pkg_name.scope(), BTreeSet::from([0]))
.with_package(pkg_name.scope(), pkg_manifest.try_into().unwrap())
.with_package(pkg_name.scope(), pkg_2_manifest.try_into().unwrap());
// exact-match requirement -> should resolve to 0.1.0
let specifier = DependencySpecifier::Registry(RegistryDependencySpecifier {
name: pkg_name.clone(),
version: format!("={version_str}").parse().unwrap(),
realm: None,
index: DEFAULT_INDEX_NAME.to_string(),
});
// strictly-greater requirement -> should resolve to 0.1.1
let specifier_2 = DependencySpecifier::Registry(RegistryDependencySpecifier {
name: pkg_name.clone(),
version: format!(">{version_str}").parse().unwrap(),
realm: None,
index: DEFAULT_INDEX_NAME.to_string(),
});
// the consuming project: depends on the package via both specifiers
let user_manifest = Manifest {
name: "test/user".parse().unwrap(),
version: version.clone(),
exports: Default::default(),
path_style: Default::default(),
private: true,
realm: None,
indices: Default::default(),
#[cfg(feature = "wally")]
sourcemap_generator: None,
overrides: Default::default(),
dependencies: BTreeMap::from([("test".to_string(), specifier.clone())]),
peer_dependencies: BTreeMap::from([("test2".to_string(), specifier_2.clone())]),
description: Some(description.to_string()),
license: None,
authors: None,
repository: None,
};
let mut project = Project::new(
&dir_path,
&dir_path,
HashMap::from([(
DEFAULT_INDEX_NAME.to_string(),
Box::new(index.clone()) as Box<dyn Index>,
)]),
user_manifest,
)
.unwrap();
let manifest = project.manifest().clone();
let graph = manifest.dependency_graph(&mut project, false).unwrap();
// one package resolved, with two versions (one per specifier)
assert_eq!(graph.children.len(), 1);
let versions = graph.children.get(&pkg_name.clone().into()).unwrap();
assert_eq!(versions.len(), 2);
let resolved_pkg = versions.get(&version).unwrap();
assert_eq!(
resolved_pkg,
&ResolvedPackage {
pkg_ref: PackageRef::Registry(RegistryPackageRef {
name: pkg_name.clone(),
version: version.clone(),
index_url: index.url().clone(),
}),
dependencies: Default::default(),
realm: Realm::Shared,
dep_type: DependencyType::Normal,
}
);
let resolved_pkg_2 = versions.get(&version_2).unwrap();
assert_eq!(
resolved_pkg_2,
&ResolvedPackage {
pkg_ref: PackageRef::Registry(RegistryPackageRef {
name: pkg_name.clone(),
version: version_2.clone(),
index_url: index.url().clone(),
}),
dependencies: Default::default(),
realm: Realm::Shared,
dep_type: DependencyType::Normal,
}
);
}

View file

@ -1,9 +0,0 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
pnpm-lock.yaml

View file

@ -1,31 +0,0 @@
/** @type { import("eslint").Linter.Config } */
// ESLint config for the SvelteKit website: TypeScript + Svelte recommended
// rule sets, with Prettier last so it disables conflicting stylistic rules.
module.exports = {
root: true,
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
'plugin:svelte/recommended',
'prettier'
],
parser: '@typescript-eslint/parser',
plugins: ['@typescript-eslint'],
parserOptions: {
sourceType: 'module',
ecmaVersion: 2020,
// let the TS parser see .svelte files referenced from TS code
extraFileExtensions: ['.svelte']
},
env: {
browser: true,
es2017: true,
node: true
},
overrides: [
{
// .svelte files use svelte-eslint-parser, which delegates embedded
// <script lang="ts"> blocks back to the TS parser
files: ['*.svelte'],
parser: 'svelte-eslint-parser',
parserOptions: {
parser: '@typescript-eslint/parser'
}
}
]
};

10
website/.gitignore vendored
View file

@ -1,10 +0,0 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
vite.config.js.timestamp-*
vite.config.ts.timestamp-*

View file

@ -1 +0,0 @@
engine-strict=true

View file

@ -1,4 +0,0 @@
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

View file

@ -1,17 +0,0 @@
{
"useTabs": true,
"singleQuote": true,
"trailingComma": "none",
"printWidth": 100,
"plugins": [
"prettier-plugin-svelte"
],
"overrides": [
{
"files": "*.svelte",
"options": {
"parser": "svelte"
}
}
]
}

Binary file not shown.

View file

@ -1,59 +0,0 @@
{
"name": "website",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "vite dev",
"build": "vite build",
"preview": "vite preview",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"lint": "prettier --check . && eslint .",
"format": "prettier --write ."
},
"devDependencies": {
"@shikijs/markdown-it": "^1.1.7",
"@sveltejs/adapter-auto": "^3.0.0",
"@sveltejs/kit": "^2.0.0",
"@sveltejs/vite-plugin-svelte": "^3.0.0",
"@types/eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^7.0.0",
"autoprefixer": "^10.4.18",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-svelte": "^2.35.1",
"postcss": "^8.4.35",
"prettier": "^3.1.1",
"prettier-plugin-svelte": "^3.1.2",
"svelte": "^4.2.7",
"svelte-check": "^3.6.0",
"tailwindcss": "^3.4.1",
"tslib": "^2.4.1",
"typescript": "^5.0.0",
"vite": "^5.0.3"
},
"type": "module",
"dependencies": {
"@fontsource-variable/hepta-slab": "^5.0.19",
"@tailwindcss/typography": "^0.5.10",
"@types/markdown-it": "^13.0.7",
"@types/pako": "^2.0.3",
"@types/tar-stream": "^3.1.3",
"events": "^3.3.0",
"isomorphic-dompurify": "^2.4.0",
"lucide-svelte": "^0.358.0",
"markdown-it": "^14.0.0",
"pako": "^2.1.0",
"shiki": "^1.1.7",
"simple-svelte-autocomplete": "^2.5.2",
"tar-stream": "^3.1.7",
"yaml": "^2.4.1"
},
"pnpm": {
"patchedDependencies": {
"tar-stream@3.1.7": "patches/tar-stream@3.1.7.patch",
"simple-svelte-autocomplete@2.5.2": "patches/simple-svelte-autocomplete@2.5.2.patch"
}
}
}

View file

@ -1,16 +0,0 @@
diff --git a/package.json b/package.json
index 0a796615e65323624ae9a1fdcc7c831f39dc5158..ac84dd37cf4c95223f2727d4522b8ceb74a48dff 100644
--- a/package.json
+++ b/package.json
@@ -5,6 +5,11 @@
"svelte": "src/SimpleAutocomplete.svelte",
"module": "dist/index.mjs",
"main": "dist/index.js",
+ "exports": {
+ ".": {
+ "svelte": "./src/SimpleAutocomplete.svelte"
+ }
+ },
"devDependencies": {
"@babel/core": "^7.15.0",
"@babel/preset-env": "^7.16.11",

View file

@ -1,9 +0,0 @@
diff --git a/extract.js b/extract.js
index 0ed9f82bf287aa040dd560eabbe052316223011d..16f26d49a8b0eb554b7e0a79a076027612fb4ce1 100644
--- a/extract.js
+++ b/extract.js
@@ -1,3 +1,4 @@
+const EventEmitter = require('events')
const { Writable, Readable, getStreamError } = require('streamx')
const FIFO = require('fast-fifo')
const b4a = require('b4a')

View file

@ -1,6 +0,0 @@
// PostCSS pipeline: Tailwind first, then vendor prefixing via Autoprefixer.
export default {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
}

View file

@ -1,19 +0,0 @@
/* Tailwind entry points */
@tailwind base;
@tailwind components;
@tailwind utilities;
@layer base {
html {
/* default the whole site to the serif font stack */
@apply font-serif;
}
}
@layer utilities {
/* single-line truncation with an ellipsis */
.overflow-text {
@apply min-w-0 whitespace-nowrap overflow-hidden text-ellipsis;
}
}
a {
@apply text-links underline;
}

13
website/src/app.d.ts vendored
View file

@ -1,13 +0,0 @@
// See https://kit.svelte.dev/docs/types#app
// for information about these interfaces
// Augments SvelteKit's ambient `App` namespace; all interfaces are left at
// their defaults for now — uncomment and fill in as the app needs them.
declare global {
namespace App {
// interface Error {}
// interface Locals {}
// interface PageData {}
// interface PageState {}
// interface Platform {}
}
}
export {};

View file

@ -1,23 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<!-- favicons / PWA manifest -->
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
<link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png">
<link rel="manifest" href="/site.webmanifest">
<link rel="mask-icon" href="/safari-pinned-tab.svg" color="#ffa360">
<meta name="msapplication-TileColor" content="#da532c">
<meta name="theme-color" content="#f8e4d5">
<!-- Open Graph metadata for link previews -->
<meta content="https://pesde.daimond113.com" property="og:url" />
<meta content="https://pesde.daimond113.com/logo.png" property="og:image" />
<!-- %sveltekit.head% / %sveltekit.body% are substituted by SvelteKit -->
%sveltekit.head%
</head>
<body data-sveltekit-preload-data="hover" class="bg-main-background text-standard-text h-screen">
<div class="contents">%sveltekit.body%</div>
</body>
</html>

View file

@ -1,92 +0,0 @@
// // https://github.com/pstanoev/simple-svelte-autocomplete/issues/205#issuecomment-1960396289
// Ambient typings for the untyped `simple-svelte-autocomplete` package.
declare module 'simple-svelte-autocomplete' {
import { SvelteComponent } from 'svelte';
import { HTMLAttributes } from 'svelte/elements';
export interface AutoCompleteAttributes<T> extends HTMLAttributes<HTMLDivElement> {
autocompleteOffValue?: string;
className?: string;
cleanUserText?: boolean;
closeOnBlur?: boolean;
create?: boolean;
createText?: string;
delay?: number;
disabled?: boolean;
dropdownClassName?: string;
flag?: boolean;
hideArrow?: boolean;
highlightedItem?: T;
html5autocomplete?: boolean;
ignoreAccents?: boolean;
inputClassName?: string;
inputId?: string;
items?: T[];
keywordsFieldName?: string;
labelFieldName?: string;
localFiltering?: boolean;
localSorting?: boolean;
lock?: boolean;
lowercaseKeywords?: boolean;
matchAllKeywords?: boolean;
maxItemsToShowInList?: number;
minCharactersToSearch?: number;
moreItemsText?: string;
multiple?: boolean;
name?: string;
noInputClassName?: boolean;
noInputStyles?: boolean;
noResultsText?: string;
orderableSection?: boolean;
placeholder?: string;
readonly?: boolean;
required?: boolean;
selectFirstIfEmpty?: boolean;
selectName?: string;
selectedItem?: T;
showClear?: boolean;
showLoadingIndicator?: boolean;
sortByMatchedKeywords?: boolean;
tabIndex?: number;
value?: T;
valueFieldName?: string;
}
export interface AutoCompleteFunctions<T> {
itemFilterFunction?: (item: T, keywords: string) => boolean;
itemSortFunction?: (item1: T, item2: T, keywords: string) => number;
keywordsCleanFunction?: (keywords: string) => string;
keywordsFunction?: (item: T) => string;
labelFunction?: (item: T) => string;
searchFunction?: (keyword: string, maxItemsToShowInList: number) => Promise<T[]> | boolean;
// was `(string) => string`: a parameter *named* `string` of implicit
// `any`, which errors under noImplicitAny
textCleanFunction?: (text: string) => string;
valueFunction?: (a: T) => string;
}
export interface AutoCompleteCallbacks<T> {
beforeChange?: (oldSelectedItem: T, newSelectedItem: T) => boolean;
onChange?: (newSelectedItem: T) => void;
onFocus?: () => void;
onBlur?: () => void;
onCreate?: (text: string) => void;
}
export interface AutoCompleteSlots<T> {
item: { item: T; label: string };
'no-results': null;
loading: { loadingText: string };
tag: null;
'dropdown-header': { nbItems: number; maxItemsToShowInList: number };
'dropdown-footer': { nbItems: number; maxItemsToShowInList: number };
}
export interface AutoCompleteProps<T>
extends AutoCompleteAttributes<T>,
AutoCompleteCallbacks<T>,
AutoCompleteFunctions<T> {}
export default class AutoComplete<T> extends SvelteComponent<
AutoCompleteProps<T>,
undefined,
AutoCompleteSlots<T>
> {}
}

View file

@ -1,14 +0,0 @@
<!--
Renders a code snippet highlighted with Shiki (vesper theme).
`lang` defaults to bash; `code` is the raw source text.
-->
<script context="module">
import { codeToHtml } from 'shiki';
</script>
<script lang="ts">
export let lang = 'bash';
export let code: string;
</script>
{#await codeToHtml(code, { theme: 'vesper', lang, transformers: [{ pre(node) {
this.addClassToHast(node, 'not-prose overflow-x-auto px-4 py-2 rounded-md');
} }] }) then highlightedCode}
{@html highlightedCode}
{/await}

View file

@ -1,18 +0,0 @@
import MarkdownIt from 'markdown-it';
import Shiki from '@shikijs/markdown-it';
import { writable } from 'svelte/store';

// @shikijs/markdown-it initialises asynchronously, so the configured
// MarkdownIt instance is published through a store once it is ready;
// consumers must handle the initial `undefined`.
export const md = writable<MarkdownIt | undefined>(undefined);

const it = MarkdownIt({
	html: true
});

// `Promise.all` around a single promise was redundant — await the Shiki
// plugin directly. Failures are logged instead of becoming an unhandled
// rejection that silently leaves `md` unset forever.
Shiki({ theme: 'vesper' })
	.then((plugin) => {
		it.use(plugin);
		md.set(it);
	})
	.catch((e: unknown) => {
		console.error('failed to initialise the Shiki highlighter', e);
	});

View file

@ -1,132 +0,0 @@
<script lang="ts">
	import { goto } from '$app/navigation';
	import '../app.css';
	import '@fontsource-variable/hepta-slab';
	import Autocomplete from 'simple-svelte-autocomplete';
	import Menu from 'lucide-svelte/icons/menu';
	import { onMount } from 'svelte';

	// Shape of one result returned by the registry's /v0/search endpoint.
	type SearchItem = {
		name: string;
		version: string;
		description: string;
	};

	// Queries the registry search endpoint; used by the autocomplete box below.
	const fetchSearchData = async (query: string) => {
		const request = await fetch(
			`${import.meta.env.VITE_API_URL}/v0/search?query=${encodeURIComponent(query)}`
		);
		return (await request.json()) as SearchItem[];
	};

	let selectedSearchItem: SearchItem | null = null;
	// Navigate to the package page as soon as a search result is picked.
	$: {
		if (selectedSearchItem) {
			goto(`/packages/${selectedSearchItem.name}/${selectedSearchItem.version}`);
		}
	}

	// Open/closed state of the header links dropdown.
	let linksOpen = false;
	let linksRef: HTMLDivElement;
	onMount(() => {
		// Close the links dropdown when clicking anywhere outside of it.
		const handleClick = (event: MouseEvent) => {
			if (linksOpen && !linksRef.contains(event.target as Node)) {
				linksOpen = false;
			}
		};
		document.addEventListener('click', handleClick);
		return () => {
			document.removeEventListener('click', handleClick);
		};
	});
</script>
<div class="flex flex-col px-8 lg:px-16 py-4 gap-12 items-center lg:*:max-w-6xl">
<header
class="flex-0 flex flex-col lg:flex-row relative items-center gap-4 lg:gap-0 min-h-12 w-full"
>
<div class="flex items-center gap-8 z-10">
<a href="/" class="inline-block lg:absolute top-0 left-0">
<img src="/logo.svg" alt="pesde" class="h-12" />
</a>
<div
class="relative lg:absolute lg:right-0 lg:top-1/2 lg:-translate-y-1/2 flex items-center"
bind:this={linksRef}
>
<button
type="button"
title="Toggle links"
class="hover:brightness-110 transition-[filter]"
on:click={() => {
linksOpen = !linksOpen;
}}
>
<Menu class="size-8" />
</button>
<div
class="absolute top-8 right-0 bg-paper-1-alt z-10 flex flex-col gap-4 p-4 rounded-md *:no-underline *:text-standard-text hover:*:brightness-110 *:max-w-60"
class:hidden={!linksOpen}
>
<a href="https://github.com/daimond113/pesde" class="w-max">GitHub Repository</a>
<a href="/policies">Policies</a>
<a href="/docs">Documentation</a>
</div>
</div>
</div>
<Autocomplete
inputClassName="mx-auto rounded-full text-white placeholder:opacity-75 placeholder:text-white bg-paper-1 px-3 py-1 w-full h-8 hover:brightness-110 transition-[filter]"
dropdownClassName="!bg-paper-1-alt !border-none rounded-md not-prose !p-2"
placeholder="search"
searchFunction={fetchSearchData}
delay={350}
localFiltering={false}
labelFieldName="name"
valueFieldName="name"
bind:selectedItem={selectedSearchItem}
hideArrow={true}
>
<div slot="item" let:item>
<div
class="flex flex-col justify-center w-full no-underline text-standard-text transition-[filter] h-16"
>
<div class="font-bold text-lg overflow-text">{item?.name}</div>
{#if item?.description}
<div class="overflow-text">
{item.description}
</div>
{/if}
</div>
</div>
</Autocomplete>
</header>
<div class="prose prose-pesde w-full flex-1 flex-shrink-0">
<slot />
</div>
</div>
<style>
:global(.autocomplete) {
margin-left: auto;
margin-right: auto;
max-width: 25rem !important;
}
:global(.autocomplete-list-item) {
background: #4c3c2d !important;
color: unset !important;
}
:global(.autocomplete-list-item):hover {
filter: brightness(1.1);
}
:global(.autocomplete-list-item-no-results) {
color: unset !important;
}
</style>

View file

@ -1,60 +0,0 @@
<script lang="ts">
	import type { PageData } from './$types';

	// Data from +page.ts: the most recently published packages.
	export let data: PageData;

	// Used for both the visible lead text and the page's meta descriptions.
	const tagline =
		'pesde is a package manager for Roblox that is designed to be feature-rich and easy to use.';
</script>
<svelte:head>
<title>pesde</title>
<meta content="pesde" property="og:title" />
<meta content={tagline} name="description" />
<meta content={tagline} property="og:description" />
</svelte:head>
<section
class="flex flex-col items-center lg:items-start text-center lg:text-start not-prose gap-6 my-4"
>
<h1 class="text-3xl font-extrabold text-balance">
pesde - the feature-rich Roblox package manager
</h1>
<div class="text-xl font-medium text-balance">
{tagline}
</div>
<div>
<a
href="https://github.com/daimond113/pesde?tab=readme-ov-file#installation"
class="text-standard-text no-underline rounded-md px-4 py-2 bg-paper-1 inline-block"
>Install</a
>
<a
href="https://github.com/daimond113/pesde?tab=readme-ov-file#preparing-to-publish"
class="text-standard-text no-underline rounded-md px-4 py-2 bg-paper-1-alt inline-block"
>Publish</a
>
</div>
</section>
<section>
<h2>Recently published packages</h2>
<div class="grid grid-cols-1 lg:grid-cols-2 gap-2 overflow-auto">
{#each data.latest as latestPackage}
<a
href="/packages/{latestPackage.name}/{latestPackage.version}"
class="flex flex-col justify-center p-4 bg-paper-1 rounded-md text-standard-text no-underline not-prose h-32 *:overflow-text hover:brightness-110 transition-[filter]"
>
<span class="text-sm"
>Published at <time datetime={latestPackage.published_at.toISOString()}
>{latestPackage.published_at.toLocaleString()}</time
></span
>
<div class="text-lg font-medium">{latestPackage.name}@{latestPackage.version}</div>
{#if latestPackage.description}
<div>{latestPackage.description}</div>
{/if}
</a>
{/each}
</div>
</section>

View file

@ -1,26 +0,0 @@
import { error } from '@sveltejs/kit';
import type { PageLoad } from './$types';

// Client-side rendering only: the load function talks to the registry API.
export const ssr = false;

/** Shape of one entry returned by the registry's `/v0/search` endpoint. */
type SearchEntry = {
	name: string;
	version: string;
	description?: string;
	published_at: string;
};

/** Fetches the most recently published packages for the landing page. */
export const load: PageLoad = async ({ fetch }) => {
	const response = await fetch(`${import.meta.env.VITE_API_URL}/v0/search`);
	if (!response.ok) {
		error(response.status, await response.text());
	}

	const entries = (await response.json()) as SearchEntry[];

	// The API reports `published_at` as unix seconds; the page wants a Date.
	const latest = entries.map((entry) => ({
		...entry,
		published_at: new Date(parseInt(entry.published_at) * 1000)
	}));

	return { latest };
};

View file

@ -1,258 +0,0 @@
<script lang="ts">
import Codeblock from '$lib/Codeblock.svelte';
import Note from './Note.svelte';
</script>
<svelte:head>
<title>pesde documentation</title>
<meta content="pesde documentation" property="og:title" />
<meta content="Documentation about using pesde" name="description" />
<meta content="Documentation about using pesde" property="og:description" />
</svelte:head>
<div class="max-w-prose">
<h1>Using pesde</h1>
<section>
<h2>Initializing a package</h2>
<p>
Even if you're not making a package, but something else such as a game, you will still need to
initialize a package.
</p>
<Codeblock code="pesde init" />
<p>This will prompt you with questions, after which it will create a pesde.yaml file.</p>
<Note>
If you are using pesde with the `wally` feature enabled (true on releases from the GitHub
repository) then you can use <Codeblock code="pesde convert" /> to convert your wally.toml file
to pesde.yaml. This will leave you with an empty default index, so you will need to add a URL (such
as the default `<a href="https://github.com/daimond113/pesde-index"
>https://github.com/daimond113/pesde-index</a
>`) yourself.
</Note>
</section>
<section>
<h2>Adding dependencies</h2>
<p>
You can use the `add` command to add dependencies to your project. With the `wally` feature
enabled, you can add Wally dependencies.
</p>
<p>
If you are making a package, you can use the `--peer` argument to add a package as a peer
dependency. Peer dependencies are not installed when the package is installed, but are
required to be installed by the user of the package. This is useful for things like framework
plugins.
</p>
<p>
If you want to declare the dependency as server or development only, you can use the `--realm
server` or `--realm development` arguments respectively. The `shared` realm is the default.
</p>
<Codeblock
code="pesde add --realm server SCOPE/NAME@VERSION
pesde add --realm development wally#SCOPE/NAME@VERSION # for Wally packages"
/>
</section>
<section>
<h2>Overriding dependencies</h2>
<p>
Dependency overrides allow you to use a different version of a dependency than the one
specified in the package. This is useful for sharing 1 version of a dependency.
</p>
<p>
Dependency overrides use the keys in the format of desired names separated with `>`, and
optionally other paths separated with `,`. The value is a dependency specifier.
</p>
<Note class="mb-4">
Dependency overrides do not have a command. You will need to edit the pesde.yaml file
yourself.
</Note>
<Codeblock
lang="yaml"
code="overrides:
DESIRED_NAME>DEPENDENCY_DESIRED_NAME,DESIRED_NAME_2>DEPENDENCY_DESIRED_NAME_2:
name: SCOPE/NAME
version: VERSION"
/>
</section>
<section>
<h2>Removing dependencies</h2>
<p>You can use the `remove` command to remove dependencies from your project.</p>
<Codeblock
code="pesde remove SCOPE/NAME@VERSION
pesde remove wally#SCOPE/NAME@VERSION"
/>
</section>
<section>
<h2>Outdated dependencies</h2>
<p>
You can list outdated dependencies with the `outdated` command. This will list all
dependencies that have a newer version available.
</p>
<Note class="mb-4">
This command only supports pesde registries, so neither Git nor Wally dependencies will be
listed.
</Note>
<Codeblock code="pesde outdated" />
</section>
<section>
<h2>Installing a project</h2>
<p>The `install` command will install all dependencies of a project.</p>
<p>
You can use the `--locked` argument to skip resolving and read the dependencies from the
lockfile. If any changes were made from the time the lockfile was generated this will error.
</p>
<Codeblock code="pesde install" />
</section>
<section>
<h2>Running a bin dependency</h2>
<p>
Dependencies may export a bin script. You can run this script with the `run` command. The
script will be executed with Lune. You can use the `--` argument to pass arguments to the
script.
</p>
<Note class="mb-4">
This does <b>not</b> support Wally dependencies.
</Note>
<Codeblock code="pesde run SCOPE/NAME -- -arg" />
</section>
<section>
<h2>Patching dependencies</h2>
<p>
You can use the `patch` command to patch a dependency. This will output a directory in which
you can edit the dependency. After you are done, run the `patch-commit` command with the
directory as an argument to commit the changes.
</p>
<Codeblock
code="pesde patch SCOPE/NAME@VERSION
pesde patch-commit DIRECTORY"
/>
</section>
<section>
<h2>Publishing a package</h2>
<p>
You can publish a package with the `publish` command. This will upload the package to the
registry. This will publish to the `default` index.
</p>
<Note class="mb-4"
>The official pesde registry does not support publishing packages with Wally or Git
dependencies. Dependency overrides and patches of your package as a dependency will be
ignored.</Note
>
<Codeblock code="pesde publish" />
<p>
Please look at the <a href="#cheatsheet">manifest format cheat sheet</a> for more information about
the pesde.yaml file before publishing.
</p>
</section>
<section>
<h2>Searching for packages</h2>
<p>
You can search for packages with the `search` command. This will list all packages that match
the query. It will search by name and description.
</p>
<Codeblock code="pesde search QUERY" />
</section>
<section>
<h2 id="cheatsheet">Manifest format cheat sheet</h2>
<p>
Here is a cheat sheet for the manifest format. This is the format of the pesde.yaml file. The
`name` and `version` fields are required. All other fields are optional.
</p>
<p>A description of each type:</p>
<ul>
<li>PACKAGE_NAME: either a STANDARD_PACKAGE_NAME or WALLY_PACKAGE_NAME</li>
<li>STANDARD_PACKAGE_NAME: refers to a package name used by pesde</li>
<li>
WALLY_PACKAGE_NAME: refers to a package name used by Wally. This will usually be prefixed
with `wally#`, although not required when this rather than `PACKAGE_NAME` is the type
</li>
<li>VERSION: a semver version specifier</li>
<li>VERSION_REQ: a semver version requirement</li>
<li>REALM: one of `shared`, `server`, or `development`</li>
<li>COMMAND: a command to run</li>
<li>
DEPENDENCY_SPECIFIER: one of REGISTRY_DEPENDENCY_SPECIFIER, GIT_DEPENDENCY_SPECIFIER,
WALLY_DEPENDENCY_SPECIFIER
</li>
<li>
REGISTRY_DEPENDENCY_SPECIFIER: an object with the following structure:
<Codeblock
lang="yaml"
code="name: STANDARD_PACKAGE_NAME
version: VERSION_REQ
# OPTIONAL (name in the `indices` field) - defaults to `default`
index: STRING"
/>
</li>
<li>
GIT_DEPENDENCY_SPECIFIER: an object with the following structure:
<Codeblock
lang="yaml"
code="repo: URL
rev: STRING"
/>
</li>
<li>
WALLY_DEPENDENCY_SPECIFIER: an object with the following structure:
<Codeblock
lang="yaml"
code="wally: WALLY_PACKAGE_NAME
version: VERSION_REQ
index_url: URL"
/>
</li>
</ul>
<Codeblock
lang="yaml"
code="name: STANDARD_PACKAGE_NAME
version: VERSION
description: STRING
license: STRING
authors: STRING[]
repository: URL
exports:
lib: PATH
bin: PATH
path_style: !roblox
place: &lbrace;
REALM: STRING
&rbrace;
private: BOOL
realm: REALM
indices:
STRING: URL
# WALLY FEATURE ONLY
sourcemap_generator: COMMAND
overrides: Map<OVERRIDE_KEY, DEPENDENCY_SPECIFIER>
dependencies: Map<STRING, DEPENDENCY_SPECIFIER>
peer_dependencies: Map<STRING, DEPENDENCY_SPECIFIER>"
/>
<p>The exports field is used to specify the paths of the package's exports:</p>
<ul>
<li>
The `lib` field is a path to the file which will become the ModuleScript of the package.
This is only used for reading the types of the package.
</li>
<li>The `bin` field is a path to the file which will be ran with the `run` command.</li>
</ul>
<p>
If the realm field is not specified, it will default to `shared`. If it is another value, and
the package is to be installed in a different realm, pesde will error.
</p>
<p>
The sourcemap generator command is only used for Wally and Git packages. It will be ran in a
package's directory, and must output a sourcemap file. This is used to generate a sourcemap
for the package so that types may be found and re-exported.
</p>
</section>
</div>

View file

@ -1,10 +0,0 @@
<script lang="ts">
	// `class` is a reserved word, so the prop is accepted via an export alias.
	let className = '';
	export { className as class };
</script>
<div class={'px-6 py-4 bg-paper-1-alt rounded-md ' + className}>
<div class="text-2xl font-bold">Note</div>
<slot />
</div>

View file

@ -1,199 +0,0 @@
<script context="module">
	import DOMPurify from 'isomorphic-dompurify';

	// Force every link surviving sanitization to open in a new tab safely.
	DOMPurify.addHook('afterSanitizeAttributes', function (node) {
		if (node.tagName === 'A') {
			node.setAttribute('target', '_blank');
			node.setAttribute('rel', 'noopener noreferrer');
		}
	});
</script>
<script lang="ts">
	import type { PageData } from './$types';
	import { md } from '$lib/markdown';
	import Codeblock from '$lib/Codeblock.svelte';
	import { goto } from '$app/navigation';
	import ChevronDown from 'lucide-svelte/icons/chevron-down';
	import Mail from 'lucide-svelte/icons/mail';
	import Globe from 'lucide-svelte/icons/globe';
	import Check from 'lucide-svelte/icons/check';
	import X from 'lucide-svelte/icons/x';

	export let data: PageData;

	// Render the README to HTML ($md stays undefined until the async Shiki
	// plugin has loaded), then sanitize it before {@html} injection below.
	$: markdown =
		data.readme &&
		DOMPurify.sanitize($md?.render(data.readme) ?? '', {
			FORBID_TAGS: ['script', 'style', 'audio', 'iframe', 'object', 'embed', 'canvas']
		});

	// Regular and peer dependencies share the same rendering markup.
	$: allDependencies = [
		[data.dependencies, 'Dependencies'],
		[data.peerDependencies, 'Peer Dependencies']
	] as const;
</script>
<svelte:head>
<title>{data.scope}/{data.name}@{data.version}</title>
<meta content="{data.scope}/{data.name}@{data.version} - pesde" property="og:title" />
{#if data.description}
<meta content={data.description} name="description" />
<meta content={data.description} property="og:description" />
{/if}
</svelte:head>
<div class="flex flex-col lg:flex-row">
<div class="flex-shrink flex-grow pr-4">
<div class="mb-4">
<h1 class="mb-0">{data.scope}/{data.name}</h1>
{#if data.description}
<div class="lead mt-0 mb-0">{data.description}</div>
{/if}
</div>
<main>{@html markdown}</main>
</div>
<div class="w-full lg:w-72 flex-none">
<hr class="lg:hidden" />
<div class="flex flex-col gap-4 lg:sticky top-4">
<section>
<label for="version-select" class="section-title">Version</label>
<div class="relative">
<select
class="w-full h-full px-4 py-2 rounded-full bg-paper-1 text-standard-text appearance-none hover:brightness-110 transition-[filter]"
title="Version"
id="version-select"
on:change={(event) => {
goto(`/packages/${data.scope}/${data.name}/${event.target?.value}`);
}}
>
{#each data.versions as version}
<option value={version} selected={version === data.version}>{version}</option>
{/each}
</select>
<ChevronDown class="absolute right-4 top-1/4 pointer-events-none" />
</div>
</section>
<section>
<div class="section-title">Published at</div>
<div class="flex items-center gap-2">
<time datetime={data.publishedAt.toISOString()}>{data.publishedAt.toLocaleString()}</time>
</div>
</section>
<section>
<div class="section-title">Installation</div>
<Codeblock code="pesde add {data.scope}/{data.name}@{data.version}" />
</section>
{#if data.license}
<section>
<div class="section-title">License</div>
<div>{data.license}</div>
</section>
{/if}
{#if data.repository}
<section>
<div class="section-title">Repository</div>
<a
href={data.repository}
target="_blank"
rel="noopener noreferrer"
class="block overflow-text">{data.repository}</a
>
</section>
{/if}
{#if data.authors}
<section>
<div class="section-title">Authors</div>
<ul class="not-prose">
{#each data.authors as author}
<li class="flex">
<span class="overflow-text pr-2">
{author.name}
</span>
<div class="ml-auto flex items-center gap-4">
{#if author.email}
<a href="mailto:{author.email}" title="Email {author.name}">
<Mail class="size-6" />
</a>
{/if}
{#if author.url}
<a href={author.url} title="Website of {author.name}">
<Globe class="size-6" />
</a>
{/if}
</div>
</li>
{/each}
</ul>
</section>
{/if}
{#if data.realm}
<section>
<div class="section-title">Realm</div>
<div>{data.realm}</div>
</section>
{/if}
{#each allDependencies as [dependencies, title]}
{#if dependencies && dependencies.length > 0}
<section>
<div class="section-title">{title}</div>
<ul class="not-prose">
{#each dependencies as dependency}
<li>
{#if 'name' in dependency}
<a
href="/packages/{dependency.name}/latest"
class="block overflow-text"
title="View {dependency.name}"
>
{dependency.name}@{dependency.version}
</a>
{:else}
{@const url = /.+\/.+/.test(dependency.repo)
? `https://github.com/${dependency.repo}`
: dependency.repo}
<a href={url} class="block overflow-text" title="View {dependency.repo}">
{dependency.repo}#{dependency.rev}
</a>
{/if}
</li>
{/each}
</ul>
</section>
{/if}
{/each}
<section>
<div class="section-title">Exports</div>
<ul class="not-prose">
<li>
<div class="flex items-center">
Library:
{#if data.exports.lib}
<Check class="size-6 text-green-500 inline-block ml-auto" />
{:else}
<X class="size-6 text-red-500 inline-block ml-auto" />
{/if}
</div>
</li>
<li>
<div class="flex items-center">
Binary:
{#if data.exports.bin}
<Check class="size-6 text-green-500 inline-block ml-auto" />
{:else}
<X class="size-6 text-red-500 inline-block ml-auto" />
{/if}
</div>
</li>
</ul>
</section>
</div>
</div>
</div>
<style>
.section-title {
@apply text-xl font-semibold;
}
</style>

View file

@ -1,133 +0,0 @@
import { error, redirect } from '@sveltejs/kit';
import type { PageLoad } from './$types';
import { extract } from 'tar-stream';
import { inflate } from 'pako';
import { parse } from 'yaml';
export const ssr = false;
type Dependencies = ({ name: string; version: string } | { repo: string; rev: string })[];
/**
 * Splits a manifest author string of the form `Name <email> (url)` —
 * with the email and url parts optional and in either order — into its
 * components. Falls back to the whole string as the name.
 */
const parseAuthor = (author: string) => {
	// `<email>` and `(url)` may appear in either order, hence the duplicated
	// capture groups; the first match of each wins.
	const pattern =
		/^(?<name>.+?)(?:\s*<(?<email>.+?)>)?(?:\s*\((?<url>.+?)\))?(?:\s*<(?<email2>.+?)>)?(?:\s*\((?<url2>.+?)\))?$/;
	const groups = pattern.exec(author)?.groups;
	return {
		name: groups?.name ?? author,
		email: groups?.email ?? groups?.email2,
		url: groups?.url ?? groups?.url2
	};
};
/**
 * Loads a package page: downloads the published archive, extracts the
 * manifest (pesde.yaml) and README from it, and resolves version metadata.
 */
export const load: PageLoad = async ({ params, fetch }) => {
	const res = await fetch(
		`${import.meta.env.VITE_API_URL}/v0/packages/${params.scope}/${params.name}/${params.version}`
	);
	if (res.status === 404) {
		error(res.status, 'Package not found');
	} else if (!res.ok) {
		error(res.status, await res.text());
	}
	// The response body is a compressed tar archive of the package contents.
	const body = await res.arrayBuffer();
	const extractStream = extract();
	extractStream.end(inflate(body));
	let manifestBuffer, readmeBuffer;
	for await (const entry of extractStream) {
		// Reads the whole entry into one Uint8Array. Chunks are collected and
		// joined once at the end; the previous `chunks.push(...chunk)` spread
		// could exceed the engine's argument-count limit on large entries.
		const read = () => {
			return new Promise<Uint8Array>((resolve, reject) => {
				const chunks: Uint8Array[] = [];
				let total = 0;
				entry.on('data', (chunk: Uint8Array) => {
					chunks.push(chunk);
					total += chunk.length;
				});
				entry.on('end', () => {
					const merged = new Uint8Array(total);
					let offset = 0;
					for (const chunk of chunks) {
						merged.set(chunk, offset);
						offset += chunk.length;
					}
					resolve(merged);
				});
				entry.on('error', reject);
			});
		};
		switch (entry.header.name.toLowerCase()) {
			case 'pesde.yaml': {
				manifestBuffer = await read();
				break;
			}
			case 'readme.md':
			case 'readme.txt':
			case 'readme': {
				readmeBuffer = await read();
				break;
			}
		}
		// Drain unread entries so the extraction stream keeps advancing.
		entry.resume();
	}
	if (!manifestBuffer) {
		error(500, 'Package is missing pesde.yaml');
	}
	const textDecoder = new TextDecoder();
	const manifest = textDecoder.decode(manifestBuffer);
	// `!roblox` is a custom YAML tag used by the manifest's exports section.
	const parsed = parse(manifest, {
		customTags: [
			{
				tag: '!roblox',
				collection: 'map'
			}
		]
	}) as {
		version: string;
		authors?: string[];
		description?: string;
		license?: string;
		repository?: string;
		realm?: string;
		dependencies?: Dependencies;
		peer_dependencies?: Dependencies;
		exports?: { lib?: string; bin?: string };
	};
	// `latest` is an alias: redirect to the canonical version URL.
	if (params.version.toLowerCase() === 'latest') {
		redirect(302, `/packages/${params.scope}/${params.name}/${parsed.version}`);
	}
	const readme = readmeBuffer ? textDecoder.decode(readmeBuffer) : null;
	const versionsRes = await fetch(
		`${import.meta.env.VITE_API_URL}/v0/packages/${params.scope}/${params.name}/versions`
	);
	if (!versionsRes.ok) {
		error(versionsRes.status, await versionsRes.text());
	}
	// Each element is a [version, unix-seconds publish timestamp] pair.
	const versions = (await versionsRes.json()) as [string, number][];
	return {
		scope: params.scope,
		name: params.name,
		version: parsed.version,
		versions: versions.map(([version]) => version),
		publishedAt: new Date(
			(versions.find(([version]) => version === parsed.version)?.[1] ?? 0) * 1000
		),
		authors: parsed.authors?.map(parseAuthor),
		description: parsed.description,
		license: parsed.license,
		readme,
		repository: parsed.repository,
		realm: parsed.realm,
		dependencies: parsed.dependencies,
		peerDependencies: parsed.peer_dependencies,
		exports: {
			lib: !!parsed.exports?.lib,
			bin: !!parsed.exports?.bin
		}
	};
};

View file

@ -1,71 +0,0 @@
<svelte:head>
<title>Policies</title>
<meta content="Policies for content on the public pesde registry" property="og:title" />
<meta content="Rules about content on the public pesde registry" name="description" />
<meta content="Rules about content on the public pesde registry" property="og:description" />
</svelte:head>
<div class="max-w-prose">
<h1>Policies for content on the public pesde registry</h1>
<p>
If anything is unclear, please <a href="mailto:pesde@daimond113.com">contact us</a> and we will be
happy to help.
</p>
<section>
<h2>Permitted content</h2>
<p>
The pesde registry is a place for open source Roblox packages. Examples of allowed content:
</p>
<ul>
<li>Libraries</li>
<li>Frameworks</li>
</ul>
Examples of disallowed content:
<ul>
<li>Malicious code</li>
<li>Illegal content</li>
</ul>
pesde is not responsible for the content of packages. If you believe a package is malicious or contains
illegal content, please
<a href="mailto:pesde@daimond113.com">contact us</a>.
</section>
<section>
<h2>Package removal</h2>
<p>
pesde does not support removing packages from the registry without a reason such as security
or complying with the law. In case you published a secret to the registry, you must regenerate
it. If you believe a package should be removed, please <a href="mailto:pesde@daimond113.com"
>contact us</a
>. We will review your request and take action if necessary.
</p>
<p>
If we find that a package is breaking the permitted content policy, we will remove it from the
registry without notice.
</p>
<p>
pesde reserves the right to remove any package from the registry at any time for any reason.
</p>
</section>
<section>
<h2>Package ownership</h2>
<p>
Packages are owned by scopes. The first person to publish to the scope owns the scope. If you
want to work as a team, the owner of the scope must send a pull request to the <a
href="https://github.com/daimond113/pesde-index">index repo</a
> adding the members' user IDs to the scope's `owners.yaml` file.
</p>
</section>
<section>
<h2>Scope squatting</h2>
<p>
Scope squatting is the act of creating a scope with the intent of preventing others from using
it. Scope squatting is not allowed. If you believe a scope is being squatted, please
<a href="mailto:pesde@daimond113.com">contact us</a>. We will review your request and take
action if necessary.
</p>
</section>
</div>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

View file

@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<browserconfig>
<msapplication>
<tile>
<square150x150logo src="/mstile-150x150.png"/>
<TileColor>#da532c</TileColor>
</tile>
</msapplication>
</browserconfig>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

View file

@ -1,7 +0,0 @@
<svg viewBox="0 0 215 107" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M0 106.66V100.708H10.224V41.3322H0V35.3802H17.136V53.6682L16.944 59.1402V62.1642L17.136 66.4842V100.708H27.168V106.66H0ZM36.432 87.4602C32.208 87.4602 28.416 86.5322 25.056 84.6762C21.728 82.8202 18.976 80.1802 16.8 76.7562C14.624 73.3002 13.184 69.1722 12.48 64.3722L16.944 61.0602C16.944 64.9962 17.696 68.4842 19.2 71.5242C20.704 74.5322 22.864 76.9002 25.68 78.6282C28.496 80.3562 31.84 81.2202 35.712 81.2202C39.52 81.2202 42.832 80.3562 45.648 78.6282C48.464 76.9002 50.624 74.5162 52.128 71.4762C53.664 68.4042 54.432 64.9002 54.432 60.9642C54.432 56.9642 53.648 53.4442 52.08 50.4042C50.512 47.3642 48.32 44.9802 45.504 43.2522C42.688 41.4922 39.424 40.6122 35.712 40.6122C31.904 40.6122 28.592 41.4922 25.776 43.2522C22.96 44.9802 20.784 47.3802 19.248 50.4522C17.712 53.5242 16.944 57.0602 16.944 61.0602L15.84 50.1642H17.472C18.048 47.3482 19.168 44.7562 20.832 42.3882C22.496 39.9882 24.688 38.0682 27.408 36.6282C30.16 35.1562 33.424 34.4202 37.2 34.4202C40.784 34.4202 44.064 35.0762 47.04 36.3882C50.016 37.6682 52.576 39.5082 54.72 41.9082C56.896 44.2762 58.576 47.0762 59.76 50.3082C60.976 53.5402 61.584 57.0922 61.584 60.9642C61.584 66.1162 60.544 70.6922 58.464 74.6922C56.384 78.6922 53.456 81.8282 49.68 84.1002C45.936 86.3402 41.52 87.4602 36.432 87.4602Z" fill="#F8E4D5"/>
<path d="M72.912 59.9641C67.632 59.9641 63.056 58.8441 59.184 56.6041C55.312 54.3641 52.32 51.2761 50.208 47.3401C48.096 43.3721 47.04 38.8121 47.04 33.6601C47.04 28.4441 48.096 23.8361 50.208 19.8361C52.352 15.8361 55.328 12.7001 59.136 10.4281C62.976 8.15613 67.44 7.02013 72.528 7.02013C77.648 7.02013 82.08 8.14013 85.824 10.3801C89.568 12.6201 92.448 15.7241 94.464 19.6921C96.48 23.6601 97.488 28.2361 97.488 33.4201C97.488 34.0281 97.472 34.5081 97.44 34.8601C97.44 35.2121 97.408 35.5161 97.344 35.7721H90.528C90.56 35.3881 90.576 34.9721 90.576 34.5241C90.608 34.0441 90.624 33.5001 90.624 32.8921C90.624 29.0841 89.888 25.7081 88.416 22.7641C86.976 19.7881 84.912 17.4521 82.224 15.7561C79.536 14.0601 76.304 13.2121 72.528 13.2121C68.848 13.2121 65.616 14.0921 62.832 15.8521C60.048 17.5801 57.888 19.9801 56.352 23.0521C54.816 26.0921 54.048 29.6281 54.048 33.6601C54.048 37.5641 54.8 41.0201 56.304 44.0281C57.84 47.0041 60.016 49.3401 62.832 51.0361C65.648 52.7321 68.992 53.5801 72.864 53.5801C76.896 53.5801 80.384 52.6361 83.328 50.7481C86.304 48.8281 88.512 46.2201 89.952 42.9241L96.48 45.3721C92.7576 48.5 86.1164 49.9781 87.408 56.0761C83.28 58.6681 78.448 59.9641 72.912 59.9641ZM51.408 35.7721V30.4441H95.28L97.296 35.7721H51.408Z" fill="#F8E4D5"/>
<path d="M105.748 94.5C102.324 94.5 99.4284 93.988 97.0604 92.964C94.7244 91.94 92.8684 90.58 91.4924 88.884C90.1164 87.156 89.2204 85.236 88.8044 83.124H87.0764L88.4684 77.46C89.0764 80.82 90.7404 83.556 93.4604 85.668C96.2124 87.78 99.7804 88.836 104.164 88.836C107.908 88.836 110.804 88.084 112.852 86.58C114.932 85.044 115.972 82.82 115.972 79.908C115.972 77.252 114.948 75.188 112.9 73.716C110.852 72.244 107.268 71.124 102.148 70.356C94.8204 69.204 89.6044 67.396 86.5004 64.932C83.3964 62.436 81.8444 59.156 81.8444 55.092C81.8444 50.996 83.2684 47.716 86.1164 45.252C88.9644 42.788 92.9324 41.556 98.0204 41.556C101.38 41.556 104.148 42.068 106.324 43.092C108.532 44.116 110.228 45.492 111.412 47.22C112.596 48.916 113.364 50.788 113.716 52.836H115.444L114.052 58.26C113.476 54.996 111.908 52.34 109.348 50.292C106.788 48.212 103.444 47.172 99.3164 47.172C95.9884 47.172 93.3804 47.876 91.4924 49.284C89.6364 50.692 88.7084 52.564 88.7084 54.9C88.7084 57.364 89.8764 59.3 92.2124 60.708C94.5484 62.084 98.2284 63.172 103.252 63.972C110.356 65.092 115.396 66.932 118.372 69.492C121.348 72.052 122.836 75.508 122.836 79.86C122.836 84.436 121.364 88.02 118.42 90.612C115.508 93.204 111.284 94.5 105.748 94.5ZM81.5564 93.588V77.46H88.4684V93.588H81.5564ZM114.052 58.26V42.468H120.964L118.372 50.16L120.964 58.26H114.052Z" fill="#F8E4D5"/>
<path d="M157.3 75.84V56.736L157.588 52.992V46.896L157.3 40.176V5.952H143.524V0H164.212V70.272H173.99L173.9 75.5L157.3 75.84ZM138.052 23.76C142.276 23.76 146.052 24.688 149.38 26.544C152.74 28.4 155.508 31.056 157.684 34.512C159.86 37.936 161.3 42.048 162.004 46.848L157.588 50.16C157.588 46.224 156.82 42.736 155.284 39.696C153.78 36.656 151.62 34.288 148.804 32.592C145.988 30.864 142.644 30 138.772 30C134.964 30 131.652 30.864 128.836 32.592C126.02 34.32 123.844 36.704 122.308 39.744C120.804 42.784 120.052 46.304 120.052 50.304C120.052 54.272 120.836 57.776 122.404 60.816C123.972 63.856 126.164 66.256 128.98 68.016C131.828 69.744 135.092 70.608 138.772 70.608C142.612 70.608 145.924 69.744 148.708 68.016C151.524 66.256 153.7 63.84 155.236 60.768C156.804 57.696 157.588 54.16 157.588 50.16L158.644 61.056H157.012C156.436 63.872 155.316 66.48 153.652 68.88C151.988 71.248 149.796 73.168 147.076 74.64C144.356 76.08 141.092 76.8 137.284 76.8C133.732 76.8 130.468 76.16 127.492 74.88C124.516 73.568 121.94 71.728 119.764 69.36C117.588 66.96 115.892 64.144 114.676 60.912C113.492 57.68 112.9 54.144 112.9 50.304C112.9 45.12 113.94 40.528 116.02 36.528C118.1 32.528 121.012 29.408 124.756 27.168C128.532 24.896 132.964 23.76 138.052 23.76Z" fill="#F8E4D5"/>
<path d="M190.084 99.792C184.804 99.792 180.228 98.672 176.356 96.432C172.484 94.192 169.492 91.104 167.38 87.168C165.268 83.2 164.212 78.64 164.212 73.488C164.212 68.272 165.268 63.664 167.38 59.664C169.524 55.664 172.5 52.528 176.308 50.256C180.148 47.984 184.612 46.848 189.7 46.848C194.82 46.848 199.252 47.968 202.996 50.208C206.74 52.448 209.62 55.552 211.636 59.52C213.652 63.488 214.66 68.064 214.66 73.248C214.66 73.856 214.644 74.336 214.612 74.688C214.612 75.04 214.58 75.344 214.516 75.6H207.7C207.732 75.216 207.748 74.8 207.748 74.352C207.78 73.872 207.796 73.328 207.796 72.72C207.796 68.912 207.06 65.536 205.588 62.592C204.148 59.616 202.084 57.28 199.396 55.584C196.708 53.888 193.476 53.04 189.7 53.04C186.02 53.04 182.788 53.92 180.004 55.68C177.22 57.408 175.06 59.808 173.524 62.88C171.988 65.92 171.22 69.456 171.22 73.488C171.22 77.392 171.972 80.848 173.476 83.856C175.012 86.832 177.188 89.168 180.004 90.864C182.82 92.56 186.164 93.408 190.036 93.408C194.068 93.408 197.556 92.464 200.5 90.576C203.476 88.656 205.684 86.048 207.124 82.752L213.652 85.2C211.764 89.712 208.74 93.28 204.58 95.904C200.452 98.496 195.62 99.792 190.084 99.792ZM168.58 75.6V70.272H212.452L214.468 75.6H168.58Z" fill="#F8E4D5"/>
</svg>

Before

Width:  |  Height:  |  Size: 6.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 8.2 KiB

View file

@ -1,39 +0,0 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
width="512.000000pt" height="512.000000pt" viewBox="0 0 512.000000 512.000000"
preserveAspectRatio="xMidYMid meet">
<metadata>
Created by potrace 1.14, written by Peter Selinger 2001-2017
</metadata>
<g transform="translate(0.000000,512.000000) scale(0.100000,-0.100000)"
fill="#000000" stroke="none">
<path d="M2320 5110 c-406 -39 -821 -184 -1154 -404 -642 -422 -1046 -1069
-1148 -1836 -16 -123 -16 -509 0 -625 65 -459 233 -873 501 -1230 92 -122 304
-341 426 -440 368 -299 822 -493 1305 -557 123 -16 509 -16 625 0 459 65 873
233 1230 501 122 92 341 304 440 426 299 368 493 822 557 1305 16 123 16 509
0 625 -69 486 -254 922 -551 1294 -406 507 -1012 845 -1671 931 -115 15 -443
21 -560 10z m585 -255 c756 -118 1400 -589 1735 -1270 108 -219 173 -428 217
-690 25 -154 25 -513 -1 -675 -109 -693 -514 -1296 -1112 -1654 -252 -150
-534 -251 -849 -303 -154 -25 -513 -25 -675 1 -419 66 -824 247 -1135 508
-452 379 -724 859 -822 1453 -25 154 -25 513 1 675 119 762 588 1404 1271
1740 256 126 526 202 830 234 85 9 444 -4 540 -19z"/>
<path d="M3570 4144 c-219 -47 -388 -224 -434 -452 -8 -41 -17 -76 -19 -79 -3
-2 -19 3 -35 11 -84 43 -205 68 -353 73 -161 6 -264 -9 -369 -51 -210 -86
-384 -285 -445 -511 -10 -36 -22 -62 -26 -59 -5 3 -9 135 -9 295 l0 289 -340
0 -340 0 0 -115 0 -115 205 0 205 0 0 -1190 0 -1190 -205 0 -205 0 0 -115 0
-115 545 0 545 0 0 115 0 115 -202 2 -203 3 -3 463 c-1 254 1 462 5 462 5 0
19 -17 33 -37 38 -56 144 -158 212 -204 223 -152 528 -196 819 -120 256 68
471 244 593 488 90 180 121 339 113 583 -3 91 -10 181 -15 200 l-9 35 101 1
c174 2 302 50 414 158 59 56 122 145 122 172 0 15 -144 68 -148 54 -2 -5 -23
-37 -47 -69 -83 -113 -191 -164 -345 -165 -136 -1 -139 0 -184 91 -53 107
-100 176 -175 255 l-63 68 497 0 498 0 -6 98 c-8 118 -25 184 -73 281 -109
221 -379 335 -654 275z m285 -149 c69 -20 152 -80 197 -144 37 -53 74 -149 80
-206 l3 -30 -422 -3 c-485 -3 -438 -14 -403 101 72 233 299 351 545 282z
m-1035 -560 c36 -9 103 -34 150 -57 117 -58 247 -186 309 -306 109 -211 128
-494 51 -735 -40 -125 -90 -207 -185 -302 -63 -64 -98 -90 -165 -123 -141 -69
-301 -94 -461 -73 -311 41 -527 233 -611 543 -20 73 -23 106 -22 263 0 167 2
187 28 270 44 143 98 238 191 330 90 90 160 135 266 171 134 45 304 52 449 19z"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 2.4 KiB

View file

@ -1,19 +0,0 @@
{
"name": "pesde",
"short_name": "pesde",
"icons": [
{
"src": "/android-chrome-192x192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "/android-chrome-512x512.png",
"sizes": "512x512",
"type": "image/png"
}
],
"theme_color": "#f8e4d5",
"background_color": "#13100F",
"display": "standalone"
}

View file

@ -1,13 +0,0 @@
import adapter from '@sveltejs/adapter-auto';
import { vitePreprocess } from '@sveltejs/vite-plugin-svelte';
/** @type {import('@sveltejs/kit').Config} */
const config = {
preprocess: vitePreprocess(),
kit: {
adapter: adapter()
}
};
export default config;

View file

@ -1,41 +0,0 @@
import defaultTheme from 'tailwindcss/defaultTheme';
/** @type {import('tailwindcss').Config} */
export default {
content: ['./src/**/*.{html,js,svelte,ts}'],
theme: {
extend: {
colors: {
'standard-text': '#f8e4d5',
'main-background': '#13100F',
'paper-1': '#422911',
'paper-1-alt': '#4C3C2D',
links: '#ffa360'
},
fontFamily: {
serif: ['Hepta Slab Variable', defaultTheme.fontFamily.serif]
},
typography: ({ theme }) => ({
pesde: {
css: {
'--tw-prose-body': theme('colors.standard-text'),
'--tw-prose-headings': theme('colors.standard-text'),
'--tw-prose-lead': theme('colors.orange[100]'),
'--tw-prose-links': theme('colors.links'),
'--tw-prose-bold': theme('colors.orange[400]'),
'--tw-prose-counters': theme('colors.orange[300]'),
'--tw-prose-bullets': theme('colors.orange[300]'),
'--tw-prose-hr': theme('colors.orange[100]'),
'--tw-prose-quotes': theme('colors.orange[300]'),
'--tw-prose-quote-borders': theme('colors.orange[500]'),
'--tw-prose-captions': theme('colors.orange[300]'),
'--tw-prose-th-borders': theme('colors.orange[300]'),
'--tw-prose-td-borders': theme('colors.orange[300]'),
'--tw-prose-code': theme('colors.orange[300]')
}
}
})
}
},
plugins: [require('@tailwindcss/typography')]
};

View file

@ -1,14 +0,0 @@
{
"extends": "./.svelte-kit/tsconfig.json",
"compilerOptions": {
"allowJs": true,
"checkJs": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": true,
"strict": true,
"moduleResolution": "bundler"
}
}

View file

@ -1,6 +0,0 @@
import { sveltekit } from '@sveltejs/kit/vite';
import { defineConfig } from 'vite';
export default defineConfig({
plugins: [sveltekit()]
});