mirror of https://github.com/pesde-pkg/pesde.git (synced 2024-12-12 11:00:36 +00:00)

feat: implement registry

This commit is contained in:
parent ea887e56ef, commit c481826d77

24 changed files with 2678 additions and 105 deletions

Cargo.lock (generated): 1430 changes
File diff suppressed because it is too large.

Cargo.toml:

@@ -67,7 +67,7 @@ sha2 = "0.10.8"
 git2 = { version = "0.19.0", optional = true }
 zip = { version = "2.1.5", optional = true }
-serde_json = { version = "1.0.120", optional = true }
+serde_json = { version = "1.0.121", optional = true }

 anyhow = { version = "1.0.86", optional = true }
 open = { version = "5.3.0", optional = true }

@@ -86,7 +86,7 @@ winreg = { version = "0.52.0", optional = true }

 [workspace]
 resolver = "2"
-members = []
+members = ["registry"]

 [profile.dev.package.full_moon]
 opt-level = 3

fly.toml: 5 changes

@@ -13,14 +13,13 @@ kill_timeout = '5s'
 [env]
   ADDRESS = '0.0.0.0'
   PORT = '8080'
-  INDEX_REPO_URL = 'https://github.com/daimond113/pesde-index'
-  COMMITTER_GIT_NAME = 'Pesde Index Updater'
+  COMMITTER_GIT_NAME = 'pesde index updater'
   COMMITTER_GIT_EMAIL = 'pesde@daimond113.com'

 [http_service]
   internal_port = 8080
   force_https = true
-  auto_stop_machines = true
+  auto_stop_machines = "suspend"
   auto_start_machines = true
   min_machines_running = 0
   processes = ['app']

registry/.env.example: new file (empty)

registry/Cargo.toml: 40 lines (new file)

@@ -0,0 +1,40 @@
[package]
name = "pesde-registry"
version = "0.7.0"
edition = "2021"
repository = "https://github.com/daimond113/pesde-index"
publish = false

[dependencies]
actix-web = "4.8.0"
actix-web-lab = "0.20.2"
actix-multipart = { version = "0.7.2", features = ["derive"] }
actix-cors = "0.7.0"
actix-governor = "0.5.0"
dotenvy = "0.15.7"
thiserror = "1.0.63"
tantivy = "0.22.0"
semver = "1.0.23"
chrono = { version = "0.4.38", features = ["serde"] }

git2 = "0.19.0"
gix = { version = "0.64.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "credentials"] }

serde = "1.0.204"
serde_json = "1.0.121"
toml = "0.8.16"

rusty-s3 = "0.5.0"
reqwest = { version = "0.12.5", features = ["json", "rustls-tls"] }

tar = "0.4.41"
flate2 = "1.0.30"

log = "0.4.22"
pretty_env_logger = "0.5.0"

sentry = "0.34.0"
sentry-log = "0.34.0"
sentry-actix = "0.34.0"

pesde = { path = "..", features = ["roblox", "lune", "luau", "wally-compat", "git2"] }

registry/src/auth.rs: 95 lines (new file)

@@ -0,0 +1,95 @@
use crate::AppState;
use actix_governor::{KeyExtractor, SimpleKeyExtractionError};
use actix_web::{
    body::MessageBody,
    dev::{ServiceRequest, ServiceResponse},
    error::Error as ActixError,
    http::header::AUTHORIZATION,
    web, HttpMessage, HttpResponse,
};
use actix_web_lab::middleware::Next;
use serde::Deserialize;

#[derive(Debug, Copy, Clone, Hash, PartialOrd, PartialEq, Eq, Ord)]
pub struct UserId(pub u64);

#[derive(Debug, Deserialize)]
struct UserResponse {
    id: u64,
}

pub async fn authentication(
    app_state: web::Data<AppState>,
    req: ServiceRequest,
    next: Next<impl MessageBody + 'static>,
) -> Result<ServiceResponse<impl MessageBody>, ActixError> {
    let token = match req
        .headers()
        .get(AUTHORIZATION)
        .map(|token| token.to_str().unwrap())
    {
        Some(token) => token,
        None => {
            return Ok(req
                .into_response(HttpResponse::Unauthorized().finish())
                .map_into_right_body())
        }
    };

    let token = if token.to_lowercase().starts_with("bearer ") {
        token[7..].to_string()
    } else {
        token.to_string()
    };

    let response = match app_state
        .reqwest_client
        .get("https://api.github.com/user")
        .header(reqwest::header::AUTHORIZATION, format!("Bearer {token}"))
        .send()
        .await
        .and_then(|res| res.error_for_status())
    {
        Ok(response) => response,
        Err(e) if e.status() == Some(reqwest::StatusCode::UNAUTHORIZED) => {
            return Ok(req
                .into_response(HttpResponse::Unauthorized().finish())
                .map_into_right_body())
        }
        Err(e) => {
            log::error!("failed to get user: {e}");
            return Ok(req
                .into_response(HttpResponse::InternalServerError().finish())
                .map_into_right_body());
        }
    };

    let user_id = match response.json::<UserResponse>().await {
        Ok(user) => user.id,
        Err(_) => {
            return Ok(req
                .into_response(HttpResponse::Unauthorized().finish())
                .map_into_right_body())
        }
    };

    req.extensions_mut().insert(UserId(user_id));

    let res = next.call(req).await?;
    Ok(res.map_into_left_body())
}

#[derive(Debug, Clone)]
pub struct UserIdExtractor;

impl KeyExtractor for UserIdExtractor {
    type Key = UserId;
    type KeyExtractionError = SimpleKeyExtractionError<&'static str>;

    fn extract(&self, req: &ServiceRequest) -> Result<Self::Key, Self::KeyExtractionError> {
        match req.extensions().get::<UserId>() {
            Some(user_id) => Ok(*user_id),
            None => Err(SimpleKeyExtractionError::new("UserId not found")),
        }
    }
}

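Note: the middleware above accepts the token with or without a "Bearer " prefix and validates it against the GitHub API. A minimal client-side sketch of an authenticated publish request (not part of this commit; the host is a placeholder, and it mirrors the CLI publish flow shown further down):

// hypothetical client call against the /v0/packages route registered in main.rs
let client = reqwest::blocking::Client::new();
let response = client
    .post("https://registry.example.com/v0/packages")
    .header("Authorization", format!("Bearer {github_token}"))
    .multipart(reqwest::blocking::multipart::Form::new().part(
        "tarball",
        reqwest::blocking::multipart::Part::bytes(tarball_bytes).file_name("package.tar.gz"),
    ))
    .send()?
    .error_for_status()?;
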
registry/src/endpoints/mod.rs: 4 lines (new file)

@@ -0,0 +1,4 @@
pub mod package_version;
pub mod package_versions;
pub mod publish_version;
pub mod search;

registry/src/endpoints/package_version.rs: 146 lines (new file)

@@ -0,0 +1,146 @@
use std::str::FromStr;

use actix_web::{http::header::ACCEPT, web, HttpRequest, HttpResponse, Responder};
use rusty_s3::{actions::GetObject, S3Action};
use semver::Version;
use serde::{Deserialize, Deserializer};

use pesde::{manifest::target::TargetKind, names::PackageName, source::pesde::IndexFile};

use crate::{
    error::Error,
    package::{s3_name, PackageResponse, S3_SIGN_DURATION},
    AppState,
};

#[derive(Debug)]
pub enum VersionRequest {
    Latest,
    Specific(Version),
}

impl<'de> Deserialize<'de> for VersionRequest {
    fn deserialize<D>(deserializer: D) -> Result<VersionRequest, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        if s.eq_ignore_ascii_case("latest") {
            return Ok(VersionRequest::Latest);
        }

        Version::parse(&s)
            .map(VersionRequest::Specific)
            .map_err(serde::de::Error::custom)
    }
}

#[derive(Debug)]
pub enum TargetRequest {
    All,
    Specific(TargetKind),
}

impl<'de> Deserialize<'de> for TargetRequest {
    fn deserialize<D>(deserializer: D) -> Result<TargetRequest, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        if s.eq_ignore_ascii_case("all") {
            return Ok(TargetRequest::All);
        }

        TargetKind::from_str(&s)
            .map(TargetRequest::Specific)
            .map_err(serde::de::Error::custom)
    }
}

pub async fn get_package_version(
    request: HttpRequest,
    app_state: web::Data<AppState>,
    path: web::Path<(PackageName, VersionRequest, TargetRequest)>,
) -> Result<impl Responder, Error> {
    let (name, version, target) = path.into_inner();

    let (scope, name_part) = name.as_str();

    let versions: IndexFile = {
        let source = app_state.source.lock().unwrap();

        match source.read_file([scope, name_part], &app_state.project)? {
            Some(versions) => toml::de::from_str(&versions)?,
            None => return Ok(HttpResponse::NotFound().finish()),
        }
    };

    let version = match version {
        VersionRequest::Latest => versions
            .iter()
            .filter(|(v_id, _)| match target {
                TargetRequest::All => true,
                TargetRequest::Specific(target) => *v_id.target() == target,
            })
            .max_by_key(|(v, _)| v.version().clone()),
        VersionRequest::Specific(version) => versions.iter().find(|(v, _)| {
            *v.version() == version
                && match target {
                    TargetRequest::All => true,
                    TargetRequest::Specific(target) => *v.target() == target,
                }
        }),
    };

    let Some((v_id, entry)) = version else {
        return Ok(HttpResponse::NotFound().finish());
    };

    let other_targets = versions
        .iter()
        .filter(|(v, _)| v.version() == v_id.version() && v.target() != v_id.target())
        .map(|(v_id, _)| v_id.target().to_string())
        .collect::<Vec<_>>();

    if request
        .headers()
        .get(ACCEPT)
        .and_then(|accept| accept.to_str().ok())
        .is_some_and(|accept| accept.eq_ignore_ascii_case("application/octet-stream"))
    {
        let object_url = GetObject::new(
            &app_state.s3_bucket,
            Some(&app_state.s3_credentials),
            &s3_name(&name, v_id),
        )
        .sign(S3_SIGN_DURATION);

        return Ok(HttpResponse::Ok().body(
            app_state
                .reqwest_client
                .get(object_url)
                .send()
                .await?
                .error_for_status()?
                .bytes()
                .await?,
        ));
    }

    let entry = entry.clone();

    let mut response = serde_json::to_value(PackageResponse {
        name: name.to_string(),
        version: v_id.version().to_string(),
        target: Some(entry.target.into()),
        description: entry.description.unwrap_or_default(),
        published_at: entry.published_at,
        license: entry.license.unwrap_or_default(),
    })?;

    if !other_targets.is_empty() {
        response["other_targets"] = serde_json::to_value(other_targets)?;
    }

    Ok(HttpResponse::Ok().json(response))
}

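For illustration (not part of the commit), the custom deserializers above let the `{version}` and `{target}` path segments accept keywords as well as concrete values; serde_json stands in for actix's path deserializer in this sketch:

// "latest" and "all" are matched case-insensitively; anything else must parse
assert!(matches!(
    serde_json::from_str::<VersionRequest>("\"latest\"").unwrap(),
    VersionRequest::Latest
));
assert!(matches!(
    serde_json::from_str::<VersionRequest>("\"1.2.3\"").unwrap(),
    VersionRequest::Specific(_)
));
assert!(matches!(
    serde_json::from_str::<TargetRequest>("\"all\"").unwrap(),
    TargetRequest::All
));
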
registry/src/endpoints/package_versions.rs: 34 lines (new file)

@@ -0,0 +1,34 @@
use actix_web::{web, HttpResponse, Responder};

use pesde::{names::PackageName, source::pesde::IndexFile};

use crate::{error::Error, package::PackageResponse, AppState};

pub async fn get_package_versions(
    app_state: web::Data<AppState>,
    path: web::Path<PackageName>,
) -> Result<impl Responder, Error> {
    let name = path.into_inner();

    let (scope, name_part) = name.as_str();

    let source = app_state.source.lock().unwrap();
    let versions: IndexFile = match source.read_file([scope, name_part], &app_state.project)? {
        Some(versions) => toml::de::from_str(&versions)?,
        None => return Ok(HttpResponse::NotFound().finish()),
    };

    Ok(HttpResponse::Ok().json(
        versions
            .into_iter()
            .map(|(v_id, entry)| PackageResponse {
                name: name.to_string(),
                version: v_id.version().to_string(),
                target: Some(entry.target.into()),
                description: entry.description.unwrap_or_default(),
                published_at: entry.published_at,
                license: entry.license.unwrap_or_default(),
            })
            .collect::<Vec<_>>(),
    ))
}

registry/src/endpoints/publish_version.rs: 270 lines (new file)

@@ -0,0 +1,270 @@
use std::{
    collections::BTreeSet,
    io::{Cursor, Read, Write},
};

use actix_multipart::form::{bytes::Bytes, MultipartForm};
use actix_web::{web, HttpResponse, Responder};
use flate2::read::GzDecoder;
use git2::{Remote, Repository, Signature};
use rusty_s3::{actions::PutObject, S3Action};
use tar::Archive;

use pesde::{
    manifest::Manifest,
    source::{
        pesde::{IndexFile, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
        specifiers::DependencySpecifiers,
        traits::PackageSource,
        version_id::VersionId,
    },
    DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
};

use crate::{
    auth::UserId,
    benv,
    error::Error,
    package::{s3_name, S3_SIGN_DURATION},
    search::update_version,
    AppState,
};

#[derive(MultipartForm)]
pub struct PublishBody {
    #[multipart(limit = "4 MiB")]
    tarball: Bytes,
}

fn signature<'a>() -> Signature<'a> {
    Signature::now(
        &benv!(required "COMMITTER_GIT_NAME"),
        &benv!(required "COMMITTER_GIT_EMAIL"),
    )
    .unwrap()
}

fn get_refspec(repo: &Repository, remote: &mut Remote) -> Result<String, git2::Error> {
    let upstream_branch_buf = repo.branch_upstream_name(repo.head()?.name().unwrap())?;
    let upstream_branch = upstream_branch_buf.as_str().unwrap();

    let refspec_buf = remote
        .refspecs()
        .find(|r| r.direction() == git2::Direction::Fetch && r.dst_matches(upstream_branch))
        .unwrap()
        .rtransform(upstream_branch)?;
    let refspec = refspec_buf.as_str().unwrap();

    Ok(refspec.to_string())
}

const FORBIDDEN_FILES: &[&str] = &[".DS_Store", "default.project.json"];
const FORBIDDEN_DIRECTORIES: &[&str] = &[".git"];

pub async fn publish_package(
    app_state: web::Data<AppState>,
    body: MultipartForm<PublishBody>,
    user_id: web::ReqData<UserId>,
) -> Result<impl Responder, Error> {
    let bytes = body.tarball.data.to_vec();
    let mut decoder = GzDecoder::new(Cursor::new(&bytes));
    let mut archive = Archive::new(&mut decoder);

    let entries = archive.entries()?;
    let mut manifest = None::<Manifest>;

    for entry in entries {
        let mut entry = entry?;
        let path = entry.path()?;
        let path = path.to_str().ok_or(Error::InvalidArchive)?;

        if entry.header().entry_type().is_dir() {
            if FORBIDDEN_DIRECTORIES.contains(&path) {
                return Err(Error::InvalidArchive);
            }

            continue;
        }

        if FORBIDDEN_FILES.contains(&path) {
            return Err(Error::InvalidArchive);
        }

        if path == MANIFEST_FILE_NAME {
            let mut content = String::new();
            entry.read_to_string(&mut content)?;
            manifest = Some(toml::de::from_str(&content).map_err(|_| Error::InvalidArchive)?);
        }
    }

    let Some(manifest) = manifest else {
        return Err(Error::InvalidArchive);
    };

    {
        let source = app_state.source.lock().unwrap();
        source.refresh(&app_state.project).map_err(Box::new)?;
        let config = source.config(&app_state.project)?;

        if manifest
            .indices
            .get(DEFAULT_INDEX_NAME)
            .filter(|index_url| *index_url == source.repo_url())
            .is_none()
        {
            return Err(Error::InvalidArchive);
        }

        let dependencies = manifest
            .all_dependencies()
            .map_err(|_| Error::InvalidArchive)?;

        for (specifier, _) in dependencies.values() {
            match specifier {
                DependencySpecifiers::Pesde(specifier) => {
                    if specifier
                        .index
                        .as_ref()
                        .is_some_and(|index| index != DEFAULT_INDEX_NAME)
                        && !config.other_registries_allowed
                    {
                        return Err(Error::InvalidArchive);
                    }

                    let (dep_scope, dep_name) = specifier.name.as_str();
                    if source
                        .read_file([dep_scope, dep_name], &app_state.project)?
                        .is_none()
                    {
                        return Err(Error::InvalidArchive);
                    }
                }
            };
        }

        let repo = source.repo_git2(&app_state.project)?;

        let (scope, name) = manifest.name.as_str();
        let mut oids = vec![];

        match source.read_file([scope, SCOPE_INFO_FILE], &app_state.project)? {
            Some(info) => {
                let info: ScopeInfo = toml::de::from_str(&info)?;
                if !info.owners.contains(&user_id.0) {
                    return Ok(HttpResponse::Forbidden().finish());
                }
            }
            None => {
                let scope_info = toml::to_string(&ScopeInfo {
                    owners: BTreeSet::from([user_id.0]),
                })?;

                let mut blob_writer = repo.blob_writer(None)?;
                blob_writer.write_all(scope_info.as_bytes())?;
                oids.push((SCOPE_INFO_FILE, blob_writer.commit()?));
            }
        };

        let mut entries: IndexFile = toml::de::from_str(
            &source
                .read_file([scope, name], &app_state.project)?
                .unwrap_or_default(),
        )?;

        let new_entry = IndexFileEntry {
            target: manifest.target.clone(),
            published_at: chrono::Utc::now(),
            description: manifest.description.clone(),
            license: manifest.license.clone(),

            dependencies,
        };

        if entries
            .insert(
                VersionId::new(manifest.version.clone(), manifest.target.kind()),
                new_entry.clone(),
            )
            .is_some()
        {
            return Ok(HttpResponse::Conflict().finish());
        }

        let mut remote = repo.find_remote("origin")?;
        let refspec = get_refspec(&repo, &mut remote)?;

        let reference = repo.find_reference(&refspec)?;

        {
            let index_content = toml::to_string(&entries)?;
            let mut blob_writer = repo.blob_writer(None)?;
            blob_writer.write_all(index_content.as_bytes())?;
            oids.push((name, blob_writer.commit()?));
        }

        let old_root_tree = reference.peel_to_tree()?;
        let old_scope_tree = match old_root_tree.get_name(scope) {
            Some(entry) => Some(repo.find_tree(entry.id())?),
            None => None,
        };

        let mut scope_tree = repo.treebuilder(old_scope_tree.as_ref())?;
        for (file, oid) in oids {
            scope_tree.insert(file, oid, 0o100644)?;
        }

        let scope_tree_id = scope_tree.write()?;
        let mut root_tree = repo.treebuilder(Some(&repo.find_tree(old_root_tree.id())?))?;
        root_tree.insert(scope, scope_tree_id, 0o040000)?;

        let tree_oid = root_tree.write()?;

        repo.commit(
            Some("HEAD"),
            &signature(),
            &signature(),
            &format!(
                "add {}@{} {}",
                manifest.name, manifest.version, manifest.target
            ),
            &repo.find_tree(tree_oid)?,
            &[&reference.peel_to_commit()?],
        )?;

        let mut push_options = git2::PushOptions::new();
        let mut remote_callbacks = git2::RemoteCallbacks::new();

        let git_creds = app_state.project.auth_config().git_credentials().unwrap();
        remote_callbacks.credentials(|_, _, _| {
            git2::Cred::userpass_plaintext(&git_creds.username, &git_creds.password)
        });

        push_options.remote_callbacks(remote_callbacks);

        remote.push(&[refspec], Some(&mut push_options))?;

        update_version(&app_state, &manifest.name, new_entry);
    }

    let object_url = PutObject::new(
        &app_state.s3_bucket,
        Some(&app_state.s3_credentials),
        &s3_name(
            &manifest.name,
            &VersionId::new(manifest.version.clone(), manifest.target.kind()),
        ),
    )
    .sign(S3_SIGN_DURATION);

    app_state
        .reqwest_client
        .put(object_url)
        .body(bytes)
        .send()
        .await?;

    Ok(HttpResponse::Ok().body(format!(
        "published {}@{} {}",
        manifest.name, manifest.version, manifest.target
    )))
}

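A sketch of producing the kind of archive this endpoint accepts: a gzipped tarball of at most 4 MiB with the manifest at its root. This is not from the commit; it assumes MANIFEST_FILE_NAME is "pesde.toml" and that `manifest_str` was serialized elsewhere:

// hypothetical publisher-side construction of the multipart "tarball" payload
use flate2::{write::GzEncoder, Compression};

let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
{
    let mut builder = tar::Builder::new(&mut encoder);
    let mut header = tar::Header::new_gnu();
    header.set_size(manifest_str.len() as u64);
    header.set_cksum();
    // the endpoint only requires the manifest; other project files would be appended the same way
    builder.append_data(&mut header, "pesde.toml", manifest_str.as_bytes())?;
    builder.finish()?;
}
let tarball_bytes = encoder.finish()?;
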
registry/src/endpoints/search.rs: 94 lines (new file)

@@ -0,0 +1,94 @@
use std::collections::HashMap;

use actix_web::{web, HttpResponse, Responder};
use serde::Deserialize;
use tantivy::{query::AllQuery, schema::Value, DateTime, Order};

use pesde::{names::PackageName, source::pesde::IndexFile};

use crate::{error::Error, package::PackageResponse, AppState};

#[derive(Deserialize)]
pub struct Request {
    #[serde(default)]
    query: Option<String>,
    #[serde(default)]
    offset: Option<usize>,
}

pub async fn search_packages(
    app_state: web::Data<AppState>,
    request: web::Query<Request>,
) -> Result<impl Responder, Error> {
    let searcher = app_state.search_reader.searcher();
    let schema = searcher.schema();

    let id = schema.get_field("id").unwrap();

    let scope = schema.get_field("scope").unwrap();
    let name = schema.get_field("name").unwrap();
    let description = schema.get_field("description").unwrap();

    let query = request.query.as_deref().unwrap_or_default().trim();

    let query = if query.is_empty() {
        Box::new(AllQuery)
    } else {
        let mut query_parser = tantivy::query::QueryParser::for_index(
            searcher.index(),
            vec![scope, name, description],
        );
        query_parser.set_field_boost(scope, 2.0);
        query_parser.set_field_boost(name, 3.5);

        query_parser.parse_query(query)?
    };

    let top_docs = searcher
        .search(
            &query,
            &tantivy::collector::TopDocs::with_limit(50)
                .and_offset(request.offset.unwrap_or_default())
                .order_by_fast_field::<DateTime>("published_at", Order::Desc),
        )
        .unwrap();

    let source = app_state.source.lock().unwrap();

    let top_docs = top_docs
        .into_iter()
        .map(|(_, doc_address)| {
            let doc = searcher.doc::<HashMap<_, _>>(doc_address).unwrap();

            let id = doc
                .get(&id)
                .unwrap()
                .as_str()
                .unwrap()
                .parse::<PackageName>()
                .unwrap();
            let (scope, name) = id.as_str();

            let mut versions: IndexFile = toml::de::from_str(
                &source
                    .read_file([scope, name], &app_state.project)
                    .unwrap()
                    .unwrap(),
            )
            .unwrap();

            let (version_id, entry) = versions.pop_last().unwrap();

            PackageResponse {
                name: id.to_string(),
                version: version_id.version().to_string(),
                target: None,
                description: entry.description.unwrap_or_default(),
                published_at: entry.published_at,
                license: entry.license.unwrap_or_default(),
            }
        })
        .collect::<Vec<_>>();

    Ok(HttpResponse::Ok().json(top_docs))
}

registry/src/error.rs: 63 lines (new file)

@@ -0,0 +1,63 @@
use actix_web::{body::BoxBody, HttpResponse, ResponseError};
use log::error;
use pesde::source::pesde::errors::ReadFile;
use serde::Serialize;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum Error {
    #[error("failed to parse query")]
    Query(#[from] tantivy::query::QueryParserError),

    #[error("error reading repo file")]
    ReadFile(#[from] ReadFile),

    #[error("error deserializing file")]
    Deserialize(#[from] toml::de::Error),

    #[error("error sending request")]
    Reqwest(#[from] reqwest::Error),

    #[error("failed to parse archive entries")]
    Tar(#[from] std::io::Error),

    #[error("invalid archive")]
    InvalidArchive,

    #[error("failed to read index config")]
    Config(#[from] pesde::source::pesde::errors::ConfigError),

    #[error("git error")]
    Git(#[from] git2::Error),

    #[error("failed to refresh source")]
    Refresh(#[from] Box<pesde::source::pesde::errors::RefreshError>),

    #[error("failed to serialize struct")]
    Serialize(#[from] toml::ser::Error),

    #[error("failed to serialize struct")]
    SerializeJson(#[from] serde_json::Error),
}

#[derive(Debug, Serialize)]
pub struct ErrorResponse {
    pub error: String,
}

impl ResponseError for Error {
    fn error_response(&self) -> HttpResponse<BoxBody> {
        match self {
            Error::Query(e) => HttpResponse::BadRequest().json(ErrorResponse {
                error: format!("failed to parse query: {e}"),
            }),
            Error::Tar(_) | Error::InvalidArchive => HttpResponse::BadRequest().json(ErrorResponse {
                error: "invalid archive. ensure it has all the required files, and all the dependencies exist in the registry.".to_string(),
            }),
            e => {
                log::error!("unhandled error: {e:?}");
                HttpResponse::InternalServerError().finish()
            }
        }
    }
}

registry/src/main.rs: 222 lines (new file)

@@ -0,0 +1,222 @@
use std::{env::current_dir, fs::create_dir_all, sync::Mutex};

use actix_cors::Cors;
use actix_governor::{Governor, GovernorConfigBuilder};
use actix_web::{
    middleware::{Compress, Condition, Logger, NormalizePath, TrailingSlash},
    rt::System,
    web, App, HttpServer,
};
use actix_web_lab::middleware::from_fn;
use log::info;
use rusty_s3::{Bucket, Credentials, UrlStyle};

use pesde::{
    source::{pesde::PesdePackageSource, traits::PackageSource},
    AuthConfig, Project,
};

use crate::{auth::UserIdExtractor, search::make_search};

mod auth;
mod endpoints;
mod error;
mod package;
mod search;

pub struct AppState {
    pub s3_bucket: Bucket,
    pub s3_credentials: Credentials,

    pub source: Mutex<PesdePackageSource>,
    pub project: Project,
    pub reqwest_client: reqwest::Client,

    pub search_reader: tantivy::IndexReader,
    pub search_writer: Mutex<tantivy::IndexWriter>,
}

#[macro_export]
macro_rules! benv {
    ($name:expr) => {
        std::env::var($name)
    };
    ($name:expr => $default:expr) => {
        benv!($name).unwrap_or($default.to_string())
    };
    (required $name:expr) => {
        benv!($name).expect(concat!("Environment variable `", $name, "` must be set"))
    };
    (parse $name:expr) => {
        benv!($name)
            .map(|v| v.parse().expect(concat!(
                "Environment variable `",
                $name,
                "` must be a valid value"
            )))
    };
    (parse required $name:expr) => {
        benv!(parse $name).expect(concat!("Environment variable `", $name, "` must be set"))
    };
    (parse $name:expr => $default:expr) => {
        benv!($name => $default)
            .parse()
            .expect(concat!(
                "Environment variable `",
                $name,
                "` must be a valid value"
            ))
    };
}

async fn run(with_sentry: bool) -> std::io::Result<()> {
    let address = benv!("ADDRESS" => "127.0.0.1");
    let port: u16 = benv!(parse "PORT" => "8080");

    let cwd = current_dir().unwrap();
    let data_dir = cwd.join("data");
    create_dir_all(&data_dir).unwrap();

    let project = Project::new(
        &cwd,
        data_dir.join("project"),
        &cwd,
        AuthConfig::new().with_git_credentials(Some(gix::sec::identity::Account {
            username: benv!(required "GITHUB_USERNAME"),
            password: benv!(required "GITHUB_PAT"),
        })),
    );
    let source = PesdePackageSource::new(env!("CARGO_PKG_REPOSITORY").try_into().unwrap());
    source.refresh(&project).expect("failed to refresh source");

    let (search_reader, search_writer) = make_search(&project, &source);

    let app_data = web::Data::new(AppState {
        s3_bucket: Bucket::new(
            benv!(parse required "S3_ENDPOINT"),
            UrlStyle::Path,
            benv!(required "S3_BUCKET_NAME"),
            benv!(required "S3_REGION"),
        )
        .unwrap(),
        s3_credentials: Credentials::new(
            benv!(required "S3_ACCESS_KEY"),
            benv!(required "S3_SECRET_KEY"),
        ),

        source: Mutex::new(source),
        project,
        reqwest_client: reqwest::ClientBuilder::new()
            .user_agent(concat!(
                env!("CARGO_PKG_NAME"),
                "/",
                env!("CARGO_PKG_VERSION")
            ))
            .build()
            .unwrap(),

        search_reader,
        search_writer: Mutex::new(search_writer),
    });

    let generic_governor_config = GovernorConfigBuilder::default()
        .burst_size(500)
        .per_millisecond(500)
        .use_headers()
        .finish()
        .unwrap();

    let publish_governor_config = GovernorConfigBuilder::default()
        .key_extractor(UserIdExtractor)
        .burst_size(12)
        .per_second(60)
        .use_headers()
        .finish()
        .unwrap();

    info!("listening on {address}:{port}");

    HttpServer::new(move || {
        App::new()
            .wrap(Condition::new(with_sentry, sentry_actix::Sentry::new()))
            .wrap(NormalizePath::new(TrailingSlash::Trim))
            .wrap(Cors::permissive())
            .wrap(Logger::default())
            .wrap(Compress::default())
            .app_data(app_data.clone())
            .route(
                "/",
                web::get().to(|| async {
                    concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"))
                }),
            )
            .service(
                web::scope("/v0")
                    .route(
                        "/search",
                        web::get()
                            .to(endpoints::search::search_packages)
                            .wrap(Governor::new(&generic_governor_config)),
                    )
                    .route(
                        "/packages/{name}",
                        web::get()
                            .to(endpoints::package_versions::get_package_versions)
                            .wrap(Governor::new(&generic_governor_config)),
                    )
                    .route(
                        "/packages/{name}/{version}/{target}",
                        web::get()
                            .to(endpoints::package_version::get_package_version)
                            .wrap(Governor::new(&generic_governor_config)),
                    )
                    .route(
                        "/packages",
                        web::post()
                            .to(endpoints::publish_version::publish_package)
                            .wrap(Governor::new(&publish_governor_config))
                            .wrap(from_fn(auth::authentication)),
                    ),
            )
    })
    .bind((address, port))?
    .run()
    .await
}

// can't use #[actix_web::main] because of Sentry:
// "Note: Macros like #[tokio::main] and #[actix_web::main] are not supported. The Sentry client must be initialized before the async runtime is started so that all threads are correctly connected to the Hub."
// https://docs.sentry.io/platforms/rust/guides/actix-web/
fn main() -> std::io::Result<()> {
    let _ = dotenvy::dotenv();

    let sentry_url = benv!("SENTRY_URL").ok();
    let with_sentry = sentry_url.is_some();

    let mut log_builder = pretty_env_logger::formatted_builder();
    log_builder.parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"));

    if with_sentry {
        let logger = sentry_log::SentryLogger::with_dest(log_builder.build());
        log::set_boxed_logger(Box::new(logger)).unwrap();
        log::set_max_level(log::LevelFilter::Info);
    } else {
        log_builder.try_init().unwrap();
    }

    let _guard = if let Some(sentry_url) = sentry_url {
        std::env::set_var("RUST_BACKTRACE", "1");

        Some(sentry::init((
            sentry_url,
            sentry::ClientOptions {
                release: sentry::release_name!(),
                ..Default::default()
            },
        )))
    } else {
        None
    };

    System::new().block_on(run(with_sentry))
}

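Usage sketch for the benv! macro defined above; these exact forms appear in run(), only the comments are added:

let address = benv!("ADDRESS" => "127.0.0.1"); // string, with a default if unset
let port: u16 = benv!(parse "PORT" => "8080"); // parsed into a typed value, with a default
let bucket = benv!(required "S3_BUCKET_NAME"); // panics at startup if the variable is missing
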
registry/src/package.rs: 44 lines (new file)

@@ -0,0 +1,44 @@
use chrono::{DateTime, Utc};
use pesde::{
    manifest::target::{Target, TargetKind},
    names::PackageName,
    source::version_id::VersionId,
};
use serde::Serialize;
use std::time::Duration;

pub const S3_SIGN_DURATION: Duration = Duration::from_secs(60 * 60);

pub fn s3_name(name: &PackageName, version_id: &VersionId) -> String {
    format!("{}+{}.tar.gz", name.escaped(), version_id.escaped())
}

#[derive(Debug, Serialize)]
pub struct TargetInfo {
    kind: TargetKind,
    lib: bool,
    bin: bool,
}

impl From<Target> for TargetInfo {
    fn from(target: Target) -> Self {
        TargetInfo {
            kind: target.kind(),
            lib: target.lib_path().is_some(),
            bin: target.bin_path().is_some(),
        }
    }
}

#[derive(Debug, Serialize)]
pub struct PackageResponse {
    pub name: String,
    pub version: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub target: Option<TargetInfo>,
    #[serde(skip_serializing_if = "String::is_empty")]
    pub description: String,
    pub published_at: DateTime<Utc>,
    #[serde(skip_serializing_if = "String::is_empty")]
    pub license: String,
}

registry/src/search.rs: 79 lines (new file)

@@ -0,0 +1,79 @@
use crate::AppState;
use pesde::{
    names::PackageName,
    source::pesde::{IndexFileEntry, PesdePackageSource},
    Project,
};
use tantivy::{
    doc,
    schema::{IndexRecordOption, TextFieldIndexing, TextOptions, FAST, STORED, STRING},
    DateTime, IndexReader, IndexWriter, Term,
};

pub fn make_search(project: &Project, source: &PesdePackageSource) -> (IndexReader, IndexWriter) {
    let mut schema_builder = tantivy::schema::SchemaBuilder::new();

    let field_options = TextOptions::default().set_indexing_options(
        TextFieldIndexing::default()
            .set_tokenizer("ngram")
            .set_index_option(IndexRecordOption::WithFreqsAndPositions),
    );

    let id_field = schema_builder.add_text_field("id", STRING | STORED);
    let scope = schema_builder.add_text_field("scope", field_options.clone());
    let name = schema_builder.add_text_field("name", field_options.clone());
    let description = schema_builder.add_text_field("description", field_options);
    let published_at = schema_builder.add_date_field("published_at", FAST);

    let search_index = tantivy::Index::create_in_ram(schema_builder.build());
    search_index.tokenizers().register(
        "ngram",
        tantivy::tokenizer::NgramTokenizer::all_ngrams(1, 12).unwrap(),
    );

    let search_reader = search_index
        .reader_builder()
        .reload_policy(tantivy::ReloadPolicy::Manual)
        .try_into()
        .unwrap();
    let mut search_writer = search_index.writer(50_000_000).unwrap();

    for (pkg_name, mut file) in source.all_packages(project).unwrap() {
        let Some((_, latest_entry)) = file.pop_last() else {
            log::warn!("no versions found for {pkg_name}");
            continue;
        };

        search_writer.add_document(doc!(
            id_field => pkg_name.to_string(),
            scope => pkg_name.as_str().0,
            name => pkg_name.as_str().1,
            description => latest_entry.description.unwrap_or_default(),
            published_at => DateTime::from_timestamp_secs(latest_entry.published_at.timestamp()),
        )).unwrap();
    }

    search_writer.commit().unwrap();
    search_reader.reload().unwrap();

    (search_reader, search_writer)
}

pub fn update_version(app_state: &AppState, name: &PackageName, entry: IndexFileEntry) {
    let mut search_writer = app_state.search_writer.lock().unwrap();
    let schema = search_writer.index().schema();
    let id_field = schema.get_field("id").unwrap();

    search_writer.delete_term(Term::from_field_text(id_field, &name.to_string()));

    search_writer.add_document(doc!(
        id_field => name.to_string(),
        schema.get_field("scope").unwrap() => name.as_str().0,
        schema.get_field("name").unwrap() => name.as_str().1,
        schema.get_field("description").unwrap() => entry.description.unwrap_or_default(),
        schema.get_field("published_at").unwrap() => DateTime::from_timestamp_secs(entry.published_at.timestamp())
    )).unwrap();

    search_writer.commit().unwrap();
    app_state.search_reader.reload().unwrap();
}

@@ -67,6 +67,8 @@ pub fn get_token_login(
         .header("Authorization", format!("Bearer {access_token}"))
         .send()
         .context("failed to send user request")?
+        .error_for_status()
+        .context("failed to get user")?
         .json::<UserResponse>()
         .context("failed to parse user response")?;

@@ -102,6 +102,8 @@ impl LoginCommand {
         )?)
         .send()
         .context("failed to send device code request")?
+        .error_for_status()
+        .context("failed to get device code response")?
         .json::<DeviceCodeResponse>()
         .context("failed to parse device code response")?;

@@ -146,6 +148,8 @@ impl LoginCommand {
         )?)
         .send()
         .context("failed to send access token request")?
+        .error_for_status()
+        .context("failed to get access token response")?
         .json::<AccessTokenResponse>()
         .context("failed to parse access token response")?;

@@ -75,7 +75,7 @@ impl Subcommand {
             Subcommand::Init(init) => init.run(project),
             Subcommand::Run(run) => run.run(project),
             Subcommand::Install(install) => install.run(project, multi, reqwest),
-            Subcommand::Publish(publish) => publish.run(project),
+            Subcommand::Publish(publish) => publish.run(project, reqwest),
             Subcommand::SelfInstall(self_install) => self_install.run(project),
             #[cfg(feature = "patches")]
             Subcommand::Patch(patch) => patch.run(project, reqwest),

@@ -2,8 +2,12 @@ use anyhow::Context;
 use clap::Args;
 use colored::Colorize;
 use pesde::{
-    manifest::target::Target, scripts::ScriptName, Project, MANIFEST_FILE_NAME, MAX_ARCHIVE_SIZE,
+    manifest::target::Target,
+    scripts::ScriptName,
+    source::{pesde::PesdePackageSource, traits::PackageSource},
+    Project, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME, MAX_ARCHIVE_SIZE,
 };
+use reqwest::StatusCode;
 use std::path::Component;

 #[derive(Debug, Args)]

@@ -14,7 +18,7 @@ pub struct PublishCommand {
 }

 impl PublishCommand {
-    pub fn run(self, project: Project) -> anyhow::Result<()> {
+    pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
         let mut manifest = project
             .deser_manifest()
             .context("failed to read manifest")?;

@@ -258,10 +262,12 @@ impl PublishCommand {
             );

             if !self.dry_run && !inquire::Confirm::new("is this information correct?").prompt()? {
-                println!("{}", "publish aborted".red().bold());
+                println!("\n{}", "publish aborted".red().bold());

                 return Ok(());
             }
+
+            println!();
         }

         let temp_manifest_path = project

@@ -308,6 +314,56 @@ impl PublishCommand {
             return Ok(());
         }

-        todo!("publishing to registry");
+        let source = PesdePackageSource::new(
+            manifest
+                .indices
+                .get(DEFAULT_INDEX_NAME)
+                .context("missing default index")?
+                .clone(),
+        );
+        source
+            .refresh(&project)
+            .context("failed to refresh source")?;
+        let config = source
+            .config(&project)
+            .context("failed to get source config")?;
+
+        match reqwest
+            .post(format!("{}/v0/packages", config.api()))
+            .multipart(reqwest::blocking::multipart::Form::new().part(
+                "tarball",
+                reqwest::blocking::multipart::Part::bytes(archive).file_name("package.tar.gz"),
+            ))
+            .send()
+            .context("failed to send request")?
+            .error_for_status()
+            .and_then(|response| response.text())
+        {
+            Ok(response) => {
+                println!("{response}");
+
+                Ok(())
+            }
+            Err(e)
+                if e.status()
+                    .is_some_and(|status| status == StatusCode::CONFLICT) =>
+            {
+                println!("{}", "package version already exists".red().bold());
+
+                Ok(())
+            }
+            Err(e)
+                if e.status()
+                    .is_some_and(|status| status == StatusCode::FORBIDDEN) =>
+            {
+                println!(
+                    "{}",
+                    "unauthorized to publish under this scope".red().bold()
+                );
+
+                Ok(())
+            }
+            Err(e) => Err(e).context("failed to get response"),
+        }
     }
 }

@@ -56,6 +56,8 @@ pub fn check_for_updates(reqwest: &reqwest::blocking::Client) -> anyhow::Result<
         ))
         .send()
         .context("failed to send request to GitHub API")?
+        .error_for_status()
+        .context("failed to get GitHub API response")?
         .json::<Vec<Release>>()
         .context("failed to parse GitHub API response")?;

@@ -108,6 +110,8 @@ pub fn download_github_release(
         ))
         .send()
         .context("failed to send request to GitHub API")?
+        .error_for_status()
+        .context("failed to get GitHub API response")?
         .json::<Release>()
         .context("failed to parse GitHub API response")?;

@@ -128,6 +132,8 @@ pub fn download_github_release(
         .header(ACCEPT, "application/octet-stream")
         .send()
         .context("failed to send request to download asset")?
+        .error_for_status()
+        .context("failed to download asset")?
         .bytes()
         .context("failed to download asset")?;

src/main.rs: 10 changes

@@ -152,7 +152,15 @@ fn run() -> anyhow::Result<()> {
         .build()?
     };

-    check_for_updates(&reqwest)?;
+    match check_for_updates(&reqwest) {
+        Ok(_) => {}
+        Err(e) => {
+            println!(
+                "{}",
+                format!("failed to check for updates: {e}\n\n").red().bold()
+            );
+        }
+    }

     let target_version = project
         .deser_manifest()

@@ -6,8 +6,8 @@ use serde::{Deserialize, Serialize};

 use crate::{
     manifest::{overrides::OverrideKey, target::Target},
-    names::{PackageName, PackageNames},
-    source::{specifiers::DependencySpecifiers, version_id::VersionId},
+    names::PackageName,
+    source::specifiers::DependencySpecifiers,
 };

 pub mod overrides;

@@ -50,7 +50,10 @@ pub struct Manifest {
     pub includes: BTreeSet<String>,
     #[cfg(feature = "patches")]
     #[serde(default, skip_serializing)]
-    pub patches: BTreeMap<PackageNames, BTreeMap<VersionId, RelativePathBuf>>,
+    pub patches: BTreeMap<
+        crate::names::PackageNames,
+        BTreeMap<crate::source::version_id::VersionId, RelativePathBuf>,
+    >,
     #[serde(default, skip_serializing)]
     pub pesde_version: Option<Version>,

@@ -1,10 +1,15 @@
 use gix::remote::Direction;
-use relative_path::RelativePathBuf;
-use serde::{Deserialize, Serialize};
-use std::{collections::BTreeMap, fmt::Debug, hash::Hash, io::Read};

 use pkg_ref::PesdePackageRef;
+use relative_path::RelativePathBuf;
+use reqwest::header::ACCEPT;
+use serde::{Deserialize, Serialize};
 use specifier::PesdeDependencySpecifier;
+use std::{
+    collections::{BTreeMap, BTreeSet},
+    fmt::Debug,
+    hash::Hash,
+    io::Read,
+};

 use crate::{
     manifest::{

@@ -28,7 +33,12 @@ pub struct PesdePackageSource {
     repo_url: gix::Url,
 }

-const SCOPE_INFO_FILE: &str = "scope.toml";
+pub const SCOPE_INFO_FILE: &str = "scope.toml";
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ScopeInfo {
+    pub owners: BTreeSet<u64>,
+}

 impl PesdePackageSource {
     pub fn new(repo_url: gix::Url) -> Self {

@@ -50,21 +60,21 @@ impl PesdePackageSource {
     pub(crate) fn tree<'a>(
         &'a self,
         repo: &'a gix::Repository,
-    ) -> Result<gix::Tree, Box<errors::TreeError>> {
+    ) -> Result<gix::Tree, errors::TreeError> {
         // this is a bare repo, so this is the actual path
         let path = repo.path().to_path_buf();

         let remote = match repo.find_default_remote(Direction::Fetch) {
             Some(Ok(remote)) => remote,
-            Some(Err(e)) => return Err(Box::new(errors::TreeError::GetDefaultRemote(path, e))),
+            Some(Err(e)) => return Err(errors::TreeError::GetDefaultRemote(path, Box::new(e))),
             None => {
-                return Err(Box::new(errors::TreeError::NoDefaultRemote(path)));
+                return Err(errors::TreeError::NoDefaultRemote(path));
             }
         };

         let refspec = match remote.refspecs(Direction::Fetch).first() {
             Some(head) => head,
-            None => return Err(Box::new(errors::TreeError::NoRefSpecs(path))),
+            None => return Err(errors::TreeError::NoRefSpecs(path)),
         };

         let spec_ref = refspec.to_ref();

@@ -72,59 +82,50 @@ impl PesdePackageSource {
             Some(local) => local
                 .to_string()
                 .replace('*', repo.branch_names().first().unwrap_or(&"main")),
-            None => return Err(Box::new(errors::TreeError::NoLocalRefSpec(path))),
+            None => return Err(errors::TreeError::NoLocalRefSpec(path)),
         };

         let reference = match repo.find_reference(&local_ref) {
             Ok(reference) => reference,
-            Err(e) => {
-                return Err(Box::new(errors::TreeError::NoReference(
-                    local_ref.to_string(),
-                    e,
-                )))
-            }
+            Err(e) => return Err(errors::TreeError::NoReference(local_ref.to_string(), e)),
         };

         let reference_name = reference.name().as_bstr().to_string();
         let id = match reference.into_fully_peeled_id() {
             Ok(id) => id,
-            Err(e) => return Err(Box::new(errors::TreeError::CannotPeel(reference_name, e))),
+            Err(e) => return Err(errors::TreeError::CannotPeel(reference_name, e)),
         };

         let id_str = id.to_string();
         let object = match id.object() {
             Ok(object) => object,
-            Err(e) => {
-                return Err(Box::new(errors::TreeError::CannotConvertToObject(
-                    id_str, e,
-                )))
-            }
+            Err(e) => return Err(errors::TreeError::CannotConvertToObject(id_str, e)),
         };

         match object.peel_to_tree() {
             Ok(tree) => Ok(tree),
-            Err(e) => Err(Box::new(errors::TreeError::CannotPeelToTree(id_str, e))),
+            Err(e) => Err(errors::TreeError::CannotPeelToTree(id_str, e)),
         }
     }

-    pub(crate) fn read_file<
+    pub fn read_file<
         I: IntoIterator<Item = P> + Clone,
         P: ToString + PartialEq<gix::bstr::BStr>,
     >(
         &self,
         file_path: I,
         project: &Project,
-    ) -> Result<Option<String>, Box<errors::ReadFile>> {
+    ) -> Result<Option<String>, errors::ReadFile> {
         let path = self.path(project);

         let repo = match gix::open(&path) {
             Ok(repo) => repo,
-            Err(e) => return Err(Box::new(errors::ReadFile::Open(path, e))),
+            Err(e) => return Err(errors::ReadFile::Open(path, Box::new(e))),
         };

         let tree = match self.tree(&repo) {
             Ok(tree) => tree,
-            Err(e) => return Err(Box::new(errors::ReadFile::Tree(path, e))),
+            Err(e) => return Err(errors::ReadFile::Tree(path, Box::new(e))),
         };

         let file_path_str = file_path

@@ -138,36 +139,34 @@ impl PesdePackageSource {
         let entry = match tree.lookup_entry(file_path, &mut lookup_buf) {
             Ok(Some(entry)) => entry,
             Ok(None) => return Ok(None),
-            Err(e) => return Err(Box::new(errors::ReadFile::Lookup(file_path_str, e))),
+            Err(e) => return Err(errors::ReadFile::Lookup(file_path_str, e)),
         };

         let object = match entry.object() {
             Ok(object) => object,
-            Err(e) => return Err(Box::new(errors::ReadFile::Lookup(file_path_str, e))),
+            Err(e) => return Err(errors::ReadFile::Lookup(file_path_str, e)),
         };

         let blob = object.into_blob();
         let string = String::from_utf8(blob.data.clone())
-            .map_err(|e| Box::new(errors::ReadFile::Utf8(file_path_str, e)))?;
+            .map_err(|e| errors::ReadFile::Utf8(file_path_str, e))?;

         Ok(Some(string))
     }

-    pub fn config(&self, project: &Project) -> Result<IndexConfig, Box<errors::ConfigError>> {
-        let file = self
-            .read_file(["config.toml"], project)
-            .map_err(|e| Box::new(e.into()))?;
+    pub fn config(&self, project: &Project) -> Result<IndexConfig, errors::ConfigError> {
+        let file = self.read_file(["config.toml"], project).map_err(Box::new)?;

         let string = match file {
             Some(s) => s,
             None => {
-                return Err(Box::new(errors::ConfigError::Missing(
+                return Err(errors::ConfigError::Missing(Box::new(
                     self.repo_url.clone(),
                 )))
             }
         };

-        let config: IndexConfig = toml::from_str(&string).map_err(|e| Box::new(e.into()))?;
+        let config: IndexConfig = toml::from_str(&string)?;

         Ok(config)
     }

@@ -175,17 +174,17 @@ impl PesdePackageSource {
     pub fn all_packages(
         &self,
         project: &Project,
-    ) -> Result<BTreeMap<PackageName, IndexFile>, Box<errors::AllPackagesError>> {
+    ) -> Result<BTreeMap<PackageName, IndexFile>, errors::AllPackagesError> {
         let path = self.path(project);

         let repo = match gix::open(&path) {
             Ok(repo) => repo,
-            Err(e) => return Err(Box::new(errors::AllPackagesError::Open(path, e))),
+            Err(e) => return Err(errors::AllPackagesError::Open(path, Box::new(e))),
         };

         let tree = match self.tree(&repo) {
             Ok(tree) => tree,
-            Err(e) => return Err(Box::new(errors::AllPackagesError::Tree(path, e))),
+            Err(e) => return Err(errors::AllPackagesError::Tree(path, Box::new(e))),
         };

         let mut packages = BTreeMap::<PackageName, IndexFile>::new();

@@ -193,12 +192,12 @@ impl PesdePackageSource {
         for entry in tree.iter() {
             let entry = match entry {
                 Ok(entry) => entry,
-                Err(e) => return Err(Box::new(errors::AllPackagesError::Decode(path, e))),
+                Err(e) => return Err(errors::AllPackagesError::Decode(path, e)),
             };

             let object = match entry.object() {
                 Ok(object) => object,
-                Err(e) => return Err(Box::new(errors::AllPackagesError::Convert(path, e))),
+                Err(e) => return Err(errors::AllPackagesError::Convert(path, e)),
             };

             // directories will be trees, and files will be blobs

@@ -211,12 +210,12 @@ impl PesdePackageSource {
             for inner_entry in object.into_tree().iter() {
                 let inner_entry = match inner_entry {
                     Ok(entry) => entry,
-                    Err(e) => return Err(Box::new(errors::AllPackagesError::Decode(path, e))),
+                    Err(e) => return Err(errors::AllPackagesError::Decode(path, e)),
                 };

                 let object = match inner_entry.object() {
                     Ok(object) => object,
-                    Err(e) => return Err(Box::new(errors::AllPackagesError::Convert(path, e))),
+                    Err(e) => return Err(errors::AllPackagesError::Convert(path, e)),
                 };

                 if !matches!(object.kind, gix::object::Kind::Blob) {

@@ -230,18 +229,17 @@ impl PesdePackageSource {
                 }

                 let blob = object.into_blob();
-                let string = String::from_utf8(blob.data.clone()).map_err(|e| {
-                    Box::new(errors::AllPackagesError::Utf8(package_name.to_string(), e))
-                })?;
+                let string = String::from_utf8(blob.data.clone())
+                    .map_err(|e| errors::AllPackagesError::Utf8(package_name.to_string(), e))?;

                 let file: IndexFile = match toml::from_str(&string) {
                     Ok(file) => file,
                     Err(e) => {
-                        return Err(Box::new(errors::AllPackagesError::Deserialize(
+                        return Err(errors::AllPackagesError::Deserialize(
                             package_name,
                             path,
-                            e,
-                        )))
+                            Box::new(e),
+                        ))
                     }
                 };

@@ -254,6 +252,13 @@ impl PesdePackageSource {

         Ok(packages)
     }
+
+    #[cfg(feature = "git2")]
+    pub fn repo_git2(&self, project: &Project) -> Result<git2::Repository, git2::Error> {
+        let path = self.path(project);
+
+        git2::Repository::open_bare(&path)
+    }
 }

 impl PackageSource for PesdePackageSource {

@@ -321,7 +326,12 @@ impl PackageSource for PesdePackageSource {
         let string = match self.read_file([scope, name], project) {
             Ok(Some(s)) => s,
             Ok(None) => return Err(Self::ResolveError::NotFound(specifier.name.to_string())),
-            Err(e) => return Err(Self::ResolveError::Read(specifier.name.to_string(), e)),
+            Err(e) => {
+                return Err(Self::ResolveError::Read(
+                    specifier.name.to_string(),
+                    Box::new(e),
+                ))
+            }
         };

         let entries: IndexFile = toml::from_str(&string)

@@ -363,7 +373,7 @@ impl PackageSource for PesdePackageSource {
         project: &Project,
         reqwest: &reqwest::blocking::Client,
     ) -> Result<(PackageFS, Target), Self::DownloadError> {
-        let config = self.config(project)?;
+        let config = self.config(project).map_err(Box::new)?;
         let index_file = project
             .cas_dir
             .join("index")

@@ -385,21 +395,20 @@ impl PackageSource for PesdePackageSource {
             Err(e) => return Err(errors::DownloadError::ReadIndex(e)),
         }

-        let (scope, name) = pkg_ref.name.as_str();
         let url = config
             .download()
-            .replace("{PACKAGE_SCOPE}", scope)
-            .replace("{PACKAGE_NAME}", name)
-            .replace("{PACKAGE_VERSION}", &pkg_ref.version.to_string());
+            .replace("{PACKAGE}", &pkg_ref.name.to_string().replace("/", "%2F"))
+            .replace("{PACKAGE_VERSION}", &pkg_ref.version.to_string())
+            .replace("{PACKAGE_TARGET}", &pkg_ref.target.to_string());

-        let mut response = reqwest.get(url);
+        let mut response = reqwest.get(url).header(ACCEPT, "application/octet-stream");

         if let Some(token) = &project.auth_config.pesde_token {
             log::debug!("using token for pesde package download");
             response = response.header("Authorization", format!("Bearer {token}"));
         }

-        let response = response.send()?;
+        let response = response.send()?.error_for_status()?;
         let bytes = response.bytes()?;

         let mut decoder = flate2::read::GzDecoder::new(bytes.as_ref());

@@ -445,7 +454,7 @@ pub struct IndexConfig {
     #[serde(default)]
     pub git_allowed: bool,
     #[serde(default)]
-    pub custom_registry_allowed: bool,
+    pub other_registries_allowed: bool,
     pub github_oauth_client_id: String,
 }

@@ -456,11 +465,8 @@ impl IndexConfig {

     pub fn download(&self) -> String {
         self.download
-            .as_ref()
-            .unwrap_or(
-                &"{API_URL}/v0/packages/{PACKAGE_SCOPE}/{PACKAGE_NAME}/{PACKAGE_VERSION}"
-                    .to_string(),
-            )
+            .as_deref()
+            .unwrap_or("{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}")
             .replace("{API_URL}", self.api())
     }
 }

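For illustration (not from the commit; the host and package name are made up), the new default template expands exactly the way download() and the download logic above build it:

let url = "{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}"
    .replace("{API_URL}", "https://registry.example.com")
    .replace("{PACKAGE}", &"scope/name".replace('/', "%2F"))
    .replace("{PACKAGE_VERSION}", "0.1.0")
    .replace("{PACKAGE_TARGET}", "luau");
assert_eq!(
    url,
    "https://registry.example.com/v0/packages/scope%2Fname/0.1.0/luau"
);
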
@@ -473,6 +479,8 @@ pub struct IndexFileEntry {

     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub description: Option<String>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub license: Option<String>,

     #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
     pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,

@@ -522,14 +530,11 @@ pub mod errors {
         #[error("error interacting with the filesystem")]
         Io(#[from] std::io::Error),

-        #[error("error opening repository at {0}")]
-        Open(PathBuf, #[source] gix::open::Error),
-
         #[error("no default remote found in repository at {0}")]
         NoDefaultRemote(PathBuf),

         #[error("error getting default remote from repository at {0}")]
-        GetDefaultRemote(PathBuf, #[source] gix::remote::find::existing::Error),
+        GetDefaultRemote(PathBuf, #[source] Box<gix::remote::find::existing::Error>),

         #[error("no refspecs found in repository at {0}")]
         NoRefSpecs(PathBuf),

@@ -554,7 +559,7 @@ pub mod errors {
     #[non_exhaustive]
     pub enum ReadFile {
         #[error("error opening repository at {0}")]
-        Open(PathBuf, #[source] gix::open::Error),
+        Open(PathBuf, #[source] Box<gix::open::Error>),

         #[error("error getting tree from repository at {0}")]
         Tree(PathBuf, #[source] Box<TreeError>),

@@ -595,14 +600,14 @@ pub mod errors {
         Parse(#[from] toml::de::Error),

         #[error("missing config file for index at {0}")]
-        Missing(gix::Url),
+        Missing(Box<gix::Url>),
     }

     #[derive(Debug, Error)]
     #[non_exhaustive]
     pub enum AllPackagesError {
         #[error("error opening repository at {0}")]
-        Open(PathBuf, #[source] gix::open::Error),
+        Open(PathBuf, #[source] Box<gix::open::Error>),

         #[error("error getting tree from repository at {0}")]
         Tree(PathBuf, #[source] Box<TreeError>),

@@ -614,7 +619,7 @@ pub mod errors {
         Convert(PathBuf, #[source] gix::object::find::existing::Error),

         #[error("error deserializing file {0} in repository at {1}")]
-        Deserialize(String, PathBuf, #[source] toml::de::Error),
+        Deserialize(String, PathBuf, #[source] Box<toml::de::Error>),

         #[error("error parsing file for {0} as utf8")]
         Utf8(String, #[source] std::string::FromUtf8Error),

@@ -1,3 +1,8 @@
+use std::{
+    collections::BTreeMap,
+    fmt::{Debug, Display},
+};
+
 use crate::{
     manifest::{
         target::{Target, TargetKind},

@@ -6,10 +11,6 @@ use crate::{
     source::{DependencySpecifiers, PackageFS, PackageSources, ResolveResult},
     Project,
 };
-use std::{
-    collections::BTreeMap,
-    fmt::{Debug, Display},
-};

 pub trait DependencySpecifier: Debug + Display {}