feat: add deprecating & yanking

This commit is contained in:
daimond113 2025-01-09 22:09:28 +01:00
parent 243dd39e14
commit 325453450b
No known key found for this signature in database
GPG key ID: 3A8ECE51328B513C
35 changed files with 1259 additions and 468 deletions

7
Cargo.lock generated
View file

@ -3693,6 +3693,7 @@ dependencies = [
"tracing", "tracing",
"tracing-subscriber", "tracing-subscriber",
"url", "url",
"urlencoding",
"wax", "wax",
"winreg", "winreg",
] ]
@ -5461,6 +5462,12 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]] [[package]]
name = "utf16_iter" name = "utf16_iter"
version = "1.0.5" version = "1.0.5"

View file

@ -67,6 +67,7 @@ sha2 = "0.10.8"
tempfile = "3.14.0" tempfile = "3.14.0"
wax = { version = "0.6.0", default-features = false } wax = { version = "0.6.0", default-features = false }
fs-err = { version = "3.0.0", features = ["tokio"] } fs-err = { version = "3.0.0", features = ["tokio"] }
urlencoding = "2.1.3"
# TODO: remove this when gitoxide adds support for: committing, pushing, adding # TODO: remove this when gitoxide adds support for: committing, pushing, adding
git2 = { version = "0.19.0", optional = true } git2 = { version = "0.19.0", optional = true }

View file

@ -91,6 +91,13 @@ For example, you may publish a package that can be used in both Roblox and
Luau environments by publishing two versions of the package, one for each Luau environments by publishing two versions of the package, one for each
environment. environment.
<Aside type="caution">
Packages for different targets but on the same version must have
the same description.
</Aside>
## Documentation ## Documentation
The `README.md` file in the root of the package will be displayed on the The `README.md` file in the root of the package will be displayed on the

View file

@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
- Support deprecating and yanking packages by @daimond113
### Changed
- Asyncify blocking operations by @daimond113
## [0.1.2] ## [0.1.2]
### Changed ### Changed
- Update to pesde lib API changes by @daimond113 - Update to pesde lib API changes by @daimond113

View file

@ -0,0 +1,76 @@
use crate::{
auth::UserId,
error::{ErrorResponse, RegistryError},
git::push_changes,
package::{read_package, read_scope_info},
search::search_version_changed,
AppState,
};
use actix_web::{http::Method, web, HttpRequest, HttpResponse};
use pesde::names::PackageName;
use std::collections::HashMap;
/// Marks a package as deprecated, or removes an existing deprecation.
///
/// Any routed method other than `DELETE` deprecates the package, using the
/// request body as the (required, non-empty, UTF-8) reason; `DELETE`
/// un-deprecates it by clearing the reason.
///
/// Responses: 400 on a bad reason, 404 if the scope or package is unknown,
/// 403 if the caller does not own the scope, 409 if the stored deprecation
/// state already matches the request, 200 on success.
pub async fn deprecate_package_version(
	request: HttpRequest,
	app_state: web::Data<AppState>,
	path: web::Path<PackageName>,
	bytes: web::Bytes,
	user_id: web::ReqData<UserId>,
) -> Result<HttpResponse, RegistryError> {
	// DELETE reverses a deprecation; every other routed method applies one.
	let deprecated = request.method() != Method::DELETE;

	let reason = if !deprecated {
		// Un-deprecating stores an empty reason.
		String::new()
	} else {
		match String::from_utf8(bytes.to_vec()) {
			Err(e) => {
				return Ok(HttpResponse::BadRequest().json(ErrorResponse {
					error: format!("invalid utf-8: {e}"),
				}))
			}
			Ok(raw) => {
				let trimmed = raw.trim().to_string();
				if trimmed.is_empty() {
					return Ok(HttpResponse::BadRequest().json(ErrorResponse {
						error: "deprecating must have a non-empty reason".to_string(),
					}));
				}
				trimmed
			}
		}
	};

	let name = path.into_inner();
	let source = app_state.source.lock().await;

	// Only owners of the package's scope may change deprecation state.
	let Some(scope_info) = read_scope_info(&app_state, name.scope(), &source).await? else {
		return Ok(HttpResponse::NotFound().finish());
	};
	if !scope_info.owners.contains(&user_id.0) {
		return Ok(HttpResponse::Forbidden().finish());
	}

	let Some(mut file) = read_package(&app_state, &name, &source).await? else {
		return Ok(HttpResponse::NotFound().finish());
	};

	// Request is a no-op (same reason, or already not deprecated) -> conflict.
	if file.meta.deprecated == reason {
		return Ok(HttpResponse::Conflict().finish());
	}
	file.meta.deprecated = reason;

	let serialized = toml::to_string(&file)?;
	let prefix = if deprecated { "" } else { "un" };

	// Commit the updated index file back to the registry's git index.
	push_changes(
		&app_state,
		&source,
		name.scope().to_string(),
		HashMap::from([(name.name().to_string(), serialized.into_bytes())]),
		format!("{prefix}deprecate {name}"),
	)
	.await?;

	// Keep the search index in sync with the new deprecation state.
	search_version_changed(&app_state, &name, &file);

	Ok(HttpResponse::Ok().body(format!("{prefix}deprecated {name}")))
}

View file

@ -1,4 +1,6 @@
pub mod deprecate_version;
pub mod package_version; pub mod package_version;
pub mod package_versions; pub mod package_versions;
pub mod publish_version; pub mod publish_version;
pub mod search; pub mod search;
pub mod yank_version;

View file

@ -1,60 +1,14 @@
use actix_web::{http::header::ACCEPT, web, HttpRequest, HttpResponse, Responder}; use actix_web::{http::header::ACCEPT, web, HttpRequest, HttpResponse};
use semver::Version; use serde::Deserialize;
use serde::{Deserialize, Deserializer};
use crate::{error::Error, package::PackageResponse, storage::StorageImpl, AppState}; use crate::{
use pesde::{ error::RegistryError,
manifest::target::TargetKind, package::{read_package, PackageResponse},
names::PackageName, request_path::{AnyOrSpecificTarget, LatestOrSpecificVersion},
source::{ storage::StorageImpl,
git_index::{read_file, root_tree, GitBasedSource}, AppState,
pesde::{DocEntryKind, IndexFile},
},
}; };
use pesde::{names::PackageName, source::pesde::DocEntryKind};
#[derive(Debug)]
pub enum VersionRequest {
Latest,
Specific(Version),
}
impl<'de> Deserialize<'de> for VersionRequest {
fn deserialize<D>(deserializer: D) -> Result<VersionRequest, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
if s.eq_ignore_ascii_case("latest") {
return Ok(VersionRequest::Latest);
}
s.parse()
.map(VersionRequest::Specific)
.map_err(serde::de::Error::custom)
}
}
#[derive(Debug)]
pub enum TargetRequest {
Any,
Specific(TargetKind),
}
impl<'de> Deserialize<'de> for TargetRequest {
fn deserialize<D>(deserializer: D) -> Result<TargetRequest, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
if s.eq_ignore_ascii_case("any") {
return Ok(TargetRequest::Any);
}
s.parse()
.map(TargetRequest::Specific)
.map_err(serde::de::Error::custom)
}
}
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct Query { pub struct Query {
@ -64,65 +18,50 @@ pub struct Query {
pub async fn get_package_version( pub async fn get_package_version(
request: HttpRequest, request: HttpRequest,
app_state: web::Data<AppState>, app_state: web::Data<AppState>,
path: web::Path<(PackageName, VersionRequest, TargetRequest)>, path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
query: web::Query<Query>, request_query: web::Query<Query>,
) -> Result<impl Responder, Error> { ) -> Result<HttpResponse, RegistryError> {
let (name, version, target) = path.into_inner(); let (name, version, target) = path.into_inner();
let (scope, name_part) = name.as_str(); let Some(file) = read_package(&app_state, &name, &*app_state.source.lock().await).await? else {
return Ok(HttpResponse::NotFound().finish());
let file: IndexFile = {
let source = app_state.source.lock().await;
let repo = gix::open(source.path(&app_state.project))?;
let tree = root_tree(&repo)?;
match read_file(&tree, [scope, name_part])? {
Some(versions) => toml::de::from_str(&versions)?,
None => return Ok(HttpResponse::NotFound().finish()),
}
}; };
let Some((v_id, entry, targets)) = ({ let Some((v_id, entry)) = ({
let version = match version { let version = match version {
VersionRequest::Latest => match file.entries.keys().map(|k| k.version()).max() { LatestOrSpecificVersion::Latest => match file.entries.keys().map(|k| k.version()).max()
{
Some(latest) => latest.clone(), Some(latest) => latest.clone(),
None => return Ok(HttpResponse::NotFound().finish()), None => return Ok(HttpResponse::NotFound().finish()),
}, },
VersionRequest::Specific(version) => version, LatestOrSpecificVersion::Specific(version) => version,
}; };
let versions = file let mut versions = file
.entries .entries
.iter() .iter()
.filter(|(v_id, _)| *v_id.version() == version); .filter(|(v_id, _)| *v_id.version() == version);
match target { match target {
TargetRequest::Any => versions.clone().min_by_key(|(v_id, _)| *v_id.target()), AnyOrSpecificTarget::Any => versions.min_by_key(|(v_id, _)| *v_id.target()),
TargetRequest::Specific(kind) => versions AnyOrSpecificTarget::Specific(kind) => {
.clone() versions.find(|(_, entry)| entry.target.kind() == kind)
.find(|(_, entry)| entry.target.kind() == kind), }
} }
.map(|(v_id, entry)| {
(
v_id,
entry,
versions.map(|(_, entry)| (&entry.target).into()).collect(),
)
})
}) else { }) else {
return Ok(HttpResponse::NotFound().finish()); return Ok(HttpResponse::NotFound().finish());
}; };
if let Some(doc_name) = query.doc.as_deref() { if let Some(doc_name) = request_query.doc.as_deref() {
let hash = 'finder: { let hash = 'finder: {
let mut hash = entry.docs.iter().map(|doc| &doc.kind).collect::<Vec<_>>(); let mut queue = entry.docs.iter().map(|doc| &doc.kind).collect::<Vec<_>>();
while let Some(doc) = hash.pop() { while let Some(doc) = queue.pop() {
match doc { match doc {
DocEntryKind::Page { name, hash } if name == doc_name => { DocEntryKind::Page { name, hash } if name == doc_name => {
break 'finder hash.clone() break 'finder hash.clone()
} }
DocEntryKind::Category { items, .. } => { DocEntryKind::Category { items, .. } => {
hash.extend(items.iter().map(|item| &item.kind)) queue.extend(items.iter().map(|item| &item.kind))
} }
_ => continue, _ => continue,
}; };
@ -152,20 +91,5 @@ pub async fn get_package_version(
}; };
} }
let response = PackageResponse { Ok(HttpResponse::Ok().json(PackageResponse::new(&name, v_id, &file)))
name: name.to_string(),
version: v_id.version().to_string(),
targets,
description: entry.description.clone().unwrap_or_default(),
published_at: entry.published_at,
license: entry.license.clone().unwrap_or_default(),
authors: entry.authors.clone(),
repository: entry.repository.clone().map(|url| url.to_string()),
};
let mut value = serde_json::to_value(response)?;
value["docs"] = serde_json::to_value(entry.docs.clone())?;
value["dependencies"] = serde_json::to_value(entry.dependencies.clone())?;
Ok(HttpResponse::Ok().json(value))
} }

View file

@ -1,54 +1,21 @@
use std::collections::{BTreeMap, BTreeSet};
use actix_web::{web, HttpResponse, Responder}; use actix_web::{web, HttpResponse, Responder};
use crate::{error::Error, package::PackageResponse, AppState}; use crate::{
use pesde::{ error::RegistryError,
names::PackageName, package::{read_package, PackageVersionsResponse},
source::{ AppState,
git_index::{read_file, root_tree, GitBasedSource},
pesde::IndexFile,
},
}; };
use pesde::names::PackageName;
pub async fn get_package_versions( pub async fn get_package_versions(
app_state: web::Data<AppState>, app_state: web::Data<AppState>,
path: web::Path<PackageName>, path: web::Path<PackageName>,
) -> Result<impl Responder, Error> { ) -> Result<impl Responder, RegistryError> {
let name = path.into_inner(); let name = path.into_inner();
let (scope, name_part) = name.as_str(); let Some(file) = read_package(&app_state, &name, &*app_state.source.lock().await).await? else {
return Ok(HttpResponse::NotFound().finish());
let file: IndexFile = {
let source = app_state.source.lock().await;
let repo = gix::open(source.path(&app_state.project))?;
let tree = root_tree(&repo)?;
match read_file(&tree, [scope, name_part])? {
Some(versions) => toml::de::from_str(&versions)?,
None => return Ok(HttpResponse::NotFound().finish()),
}
}; };
let mut responses = BTreeMap::new(); Ok(HttpResponse::Ok().json(PackageVersionsResponse::new(&name, &file)))
for (v_id, entry) in file.entries {
let info = responses
.entry(v_id.version().clone())
.or_insert_with(|| PackageResponse {
name: name.to_string(),
version: v_id.version().to_string(),
targets: BTreeSet::new(),
description: entry.description.unwrap_or_default(),
published_at: entry.published_at,
license: entry.license.unwrap_or_default(),
authors: entry.authors.clone(),
repository: entry.repository.clone().map(|url| url.to_string()),
});
info.targets.insert(entry.target.into());
info.published_at = info.published_at.max(entry.published_at);
}
Ok(HttpResponse::Ok().json(responses.into_values().collect::<Vec<_>>()))
} }

View file

@ -1,22 +1,22 @@
use crate::{ use crate::{
auth::UserId, auth::UserId,
benv, error::{ErrorResponse, RegistryError},
error::{Error, ErrorResponse}, git::push_changes,
search::update_version, package::{read_package, read_scope_info},
search::update_search_version,
storage::StorageImpl, storage::StorageImpl,
AppState, AppState,
}; };
use actix_web::{web, web::Bytes, HttpResponse, Responder}; use actix_web::{web, web::Bytes, HttpResponse};
use async_compression::Level; use async_compression::Level;
use convert_case::{Case, Casing}; use convert_case::{Case, Casing};
use fs_err::tokio as fs; use fs_err::tokio as fs;
use git2::{Remote, Repository, Signature};
use pesde::{ use pesde::{
manifest::Manifest, manifest::Manifest,
source::{ source::{
git_index::{read_file, root_tree, GitBasedSource}, git_index::GitBasedSource,
ids::VersionId, ids::VersionId,
pesde::{DocEntry, DocEntryKind, IndexFile, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE}, pesde::{DocEntry, DocEntryKind, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
specifiers::DependencySpecifiers, specifiers::DependencySpecifiers,
traits::RefreshOptions, traits::RefreshOptions,
IGNORED_DIRS, IGNORED_FILES, IGNORED_DIRS, IGNORED_FILES,
@ -28,35 +28,13 @@ use serde::Deserialize;
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::{ use std::{
collections::{BTreeSet, HashMap}, collections::{BTreeSet, HashMap},
io::{Cursor, Write}, io::Cursor,
}; };
use tokio::{ use tokio::{
io::{AsyncReadExt, AsyncWriteExt}, io::{AsyncReadExt, AsyncWriteExt},
task::JoinSet, task::JoinSet,
}; };
fn signature<'a>() -> Signature<'a> {
Signature::now(
&benv!(required "COMMITTER_GIT_NAME"),
&benv!(required "COMMITTER_GIT_EMAIL"),
)
.unwrap()
}
fn get_refspec(repo: &Repository, remote: &mut Remote) -> Result<String, git2::Error> {
let upstream_branch_buf = repo.branch_upstream_name(repo.head()?.name().unwrap())?;
let upstream_branch = upstream_branch_buf.as_str().unwrap();
let refspec_buf = remote
.refspecs()
.find(|r| r.direction() == git2::Direction::Fetch && r.dst_matches(upstream_branch))
.unwrap()
.rtransform(upstream_branch)?;
let refspec = refspec_buf.as_str().unwrap();
Ok(refspec.to_string())
}
const ADDITIONAL_FORBIDDEN_FILES: &[&str] = &["default.project.json"]; const ADDITIONAL_FORBIDDEN_FILES: &[&str] = &["default.project.json"];
#[derive(Debug, Deserialize, Default)] #[derive(Debug, Deserialize, Default)]
@ -73,7 +51,7 @@ pub async fn publish_package(
app_state: web::Data<AppState>, app_state: web::Data<AppState>,
bytes: Bytes, bytes: Bytes,
user_id: web::ReqData<UserId>, user_id: web::ReqData<UserId>,
) -> Result<impl Responder, Error> { ) -> Result<HttpResponse, RegistryError> {
let source = app_state.source.lock().await; let source = app_state.source.lock().await;
source source
.refresh(&RefreshOptions { .refresh(&RefreshOptions {
@ -102,12 +80,14 @@ pub async fn publish_package(
let file_name = entry let file_name = entry
.file_name() .file_name()
.to_str() .to_str()
.ok_or_else(|| Error::InvalidArchive("file name contains non UTF-8 characters".into()))? .ok_or_else(|| {
RegistryError::InvalidArchive("file name contains non UTF-8 characters".into())
})?
.to_string(); .to_string();
if entry.file_type().await?.is_dir() { if entry.file_type().await?.is_dir() {
if IGNORED_DIRS.contains(&file_name.as_str()) { if IGNORED_DIRS.contains(&file_name.as_str()) {
return Err(Error::InvalidArchive(format!( return Err(RegistryError::InvalidArchive(format!(
"archive contains forbidden directory: {file_name}" "archive contains forbidden directory: {file_name}"
))); )));
} }
@ -125,7 +105,7 @@ pub async fn publish_package(
.file_name() .file_name()
.to_str() .to_str()
.ok_or_else(|| { .ok_or_else(|| {
Error::InvalidArchive( RegistryError::InvalidArchive(
"file name contains non UTF-8 characters".into(), "file name contains non UTF-8 characters".into(),
) )
})? })?
@ -192,7 +172,7 @@ pub async fn publish_package(
let info: DocEntryInfo = let info: DocEntryInfo =
serde_yaml::from_str(&front_matter).map_err(|_| { serde_yaml::from_str(&front_matter).map_err(|_| {
Error::InvalidArchive(format!( RegistryError::InvalidArchive(format!(
"doc {file_name}'s frontmatter isn't valid YAML" "doc {file_name}'s frontmatter isn't valid YAML"
)) ))
})?; })?;
@ -208,7 +188,7 @@ pub async fn publish_package(
.with_extension("") .with_extension("")
.to_str() .to_str()
.ok_or_else(|| { .ok_or_else(|| {
Error::InvalidArchive( RegistryError::InvalidArchive(
"file name contains non UTF-8 characters".into(), "file name contains non UTF-8 characters".into(),
) )
})? })?
@ -248,7 +228,7 @@ pub async fn publish_package(
if IGNORED_FILES.contains(&file_name.as_str()) if IGNORED_FILES.contains(&file_name.as_str())
|| ADDITIONAL_FORBIDDEN_FILES.contains(&file_name.as_str()) || ADDITIONAL_FORBIDDEN_FILES.contains(&file_name.as_str())
{ {
return Err(Error::InvalidArchive(format!( return Err(RegistryError::InvalidArchive(format!(
"archive contains forbidden file: {file_name}" "archive contains forbidden file: {file_name}"
))); )));
} }
@ -264,7 +244,7 @@ pub async fn publish_package(
.is_some() .is_some()
{ {
if readme.is_some() { if readme.is_some() {
return Err(Error::InvalidArchive( return Err(RegistryError::InvalidArchive(
"archive contains multiple readme files".into(), "archive contains multiple readme files".into(),
)); ));
} }
@ -279,7 +259,7 @@ pub async fn publish_package(
} }
let Some(manifest) = manifest else { let Some(manifest) = manifest else {
return Err(Error::InvalidArchive( return Err(RegistryError::InvalidArchive(
"archive doesn't contain a manifest".into(), "archive doesn't contain a manifest".into(),
)); ));
}; };
@ -300,7 +280,7 @@ pub async fn publish_package(
{ {
let dependencies = manifest.all_dependencies().map_err(|e| { let dependencies = manifest.all_dependencies().map_err(|e| {
Error::InvalidArchive(format!("manifest has invalid dependencies: {e}")) RegistryError::InvalidArchive(format!("manifest has invalid dependencies: {e}"))
})?; })?;
for (specifier, _) in dependencies.values() { for (specifier, _) in dependencies.values() {
@ -317,7 +297,7 @@ pub async fn publish_package(
}) })
.is_none() .is_none()
{ {
return Err(Error::InvalidArchive(format!( return Err(RegistryError::InvalidArchive(format!(
"invalid index in pesde dependency {specifier}" "invalid index in pesde dependency {specifier}"
))); )));
} }
@ -332,43 +312,37 @@ pub async fn publish_package(
}) })
.is_none() .is_none()
{ {
return Err(Error::InvalidArchive(format!( return Err(RegistryError::InvalidArchive(format!(
"invalid index in wally dependency {specifier}" "invalid index in wally dependency {specifier}"
))); )));
} }
} }
DependencySpecifiers::Git(specifier) => { DependencySpecifiers::Git(specifier) => {
if !config.git_allowed.is_allowed(specifier.repo.clone()) { if !config.git_allowed.is_allowed(specifier.repo.clone()) {
return Err(Error::InvalidArchive( return Err(RegistryError::InvalidArchive(
"git dependencies are not allowed".into(), "git dependencies are not allowed".into(),
)); ));
} }
} }
DependencySpecifiers::Workspace(_) => { DependencySpecifiers::Workspace(_) => {
// workspace specifiers are to be transformed into pesde specifiers by the sender // workspace specifiers are to be transformed into pesde specifiers by the sender
return Err(Error::InvalidArchive( return Err(RegistryError::InvalidArchive(
"non-transformed workspace dependency".into(), "non-transformed workspace dependency".into(),
)); ));
} }
DependencySpecifiers::Path(_) => { DependencySpecifiers::Path(_) => {
return Err(Error::InvalidArchive( return Err(RegistryError::InvalidArchive(
"path dependencies are not allowed".into(), "path dependencies are not allowed".into(),
)); ));
} }
} }
} }
let repo = Repository::open_bare(source.path(&app_state.project))?; let mut files = HashMap::new();
let gix_repo = gix::open(repo.path())?;
let gix_tree = root_tree(&gix_repo)?; let scope = read_scope_info(&app_state, manifest.name.scope(), &source).await?;
match scope {
let (scope, name) = manifest.name.as_str();
let mut oids = vec![];
match read_file(&gix_tree, [scope, SCOPE_INFO_FILE])? {
Some(info) => { Some(info) => {
let info: ScopeInfo = toml::de::from_str(&info)?;
if !info.owners.contains(&user_id.0) { if !info.owners.contains(&user_id.0) {
return Ok(HttpResponse::Forbidden().finish()); return Ok(HttpResponse::Forbidden().finish());
} }
@ -378,14 +352,13 @@ pub async fn publish_package(
owners: BTreeSet::from([user_id.0]), owners: BTreeSet::from([user_id.0]),
})?; })?;
let mut blob_writer = repo.blob_writer(None)?; files.insert(SCOPE_INFO_FILE.to_string(), scope_info.into_bytes());
blob_writer.write_all(scope_info.as_bytes())?;
oids.push((SCOPE_INFO_FILE, blob_writer.commit()?));
} }
}; }
let mut file: IndexFile = let mut file = read_package(&app_state, &manifest.name, &source)
toml::de::from_str(&read_file(&gix_tree, [scope, name])?.unwrap_or_default())?; .await?
.unwrap_or_default();
let new_entry = IndexFileEntry { let new_entry = IndexFileEntry {
target: manifest.target.clone(), target: manifest.target.clone(),
@ -394,28 +367,21 @@ pub async fn publish_package(
license: manifest.license.clone(), license: manifest.license.clone(),
authors: manifest.authors.clone(), authors: manifest.authors.clone(),
repository: manifest.repository.clone(), repository: manifest.repository.clone(),
yanked: false,
docs, docs,
dependencies, dependencies,
}; };
let this_version = file let same_version = file
.entries .entries
.keys() .iter()
.find(|v_id| *v_id.version() == manifest.version); .find(|(v_id, _)| *v_id.version() == manifest.version);
if let Some(this_version) = this_version { if let Some((_, other_entry)) = same_version {
let other_entry = file.entries.get(this_version).unwrap();
// description cannot be different - which one to render in the "Recently published" list? // description cannot be different - which one to render in the "Recently published" list?
// the others cannot be different because what to return from the versions endpoint? if other_entry.description != new_entry.description {
if other_entry.description != new_entry.description
|| other_entry.license != new_entry.license
|| other_entry.authors != new_entry.authors
|| other_entry.repository != new_entry.repository
{
return Ok(HttpResponse::BadRequest().json(ErrorResponse { return Ok(HttpResponse::BadRequest().json(ErrorResponse {
error: "same version with different description or license already exists" error: "same versions with different descriptions are forbidden".to_string(),
.to_string(),
})); }));
} }
} }
@ -431,60 +397,24 @@ pub async fn publish_package(
return Ok(HttpResponse::Conflict().finish()); return Ok(HttpResponse::Conflict().finish());
} }
let mut remote = repo.find_remote("origin")?; files.insert(
let refspec = get_refspec(&repo, &mut remote)?; manifest.name.name().to_string(),
toml::to_string(&file)?.into_bytes(),
);
let reference = repo.find_reference(&refspec)?; push_changes(
&app_state,
{ &source,
let index_content = toml::to_string(&file)?; manifest.name.scope().to_string(),
let mut blob_writer = repo.blob_writer(None)?; files,
blob_writer.write_all(index_content.as_bytes())?; format!(
oids.push((name, blob_writer.commit()?));
}
let old_root_tree = reference.peel_to_tree()?;
let old_scope_tree = match old_root_tree.get_name(scope) {
Some(entry) => Some(repo.find_tree(entry.id())?),
None => None,
};
let mut scope_tree = repo.treebuilder(old_scope_tree.as_ref())?;
for (file, oid) in oids {
scope_tree.insert(file, oid, 0o100644)?;
}
let scope_tree_id = scope_tree.write()?;
let mut root_tree = repo.treebuilder(Some(&repo.find_tree(old_root_tree.id())?))?;
root_tree.insert(scope, scope_tree_id, 0o040000)?;
let tree_oid = root_tree.write()?;
repo.commit(
Some("HEAD"),
&signature(),
&signature(),
&format!(
"add {}@{} {}", "add {}@{} {}",
manifest.name, manifest.version, manifest.target manifest.name, manifest.version, manifest.target
), ),
&repo.find_tree(tree_oid)?, )
&[&reference.peel_to_commit()?], .await?;
)?;
let mut push_options = git2::PushOptions::new(); update_search_version(&app_state, &manifest.name, &manifest.version, &new_entry);
let mut remote_callbacks = git2::RemoteCallbacks::new();
let git_creds = app_state.project.auth_config().git_credentials().unwrap();
remote_callbacks.credentials(|_, _, _| {
git2::Cred::userpass_plaintext(&git_creds.username, &git_creds.password)
});
push_options.remote_callbacks(remote_callbacks);
remote.push(&[refspec], Some(&mut push_options))?;
update_version(&app_state, &manifest.name, new_entry);
} }
let version_id = VersionId::new(manifest.version.clone(), manifest.target.kind()); let version_id = VersionId::new(manifest.version.clone(), manifest.target.kind());
@ -527,8 +457,5 @@ pub async fn publish_package(
res.unwrap()?; res.unwrap()?;
} }
Ok(HttpResponse::Ok().body(format!( Ok(HttpResponse::Ok().body(format!("published {}@{version_id}", manifest.name)))
"published {}@{} {}",
manifest.name, manifest.version, manifest.target
)))
} }

View file

@ -1,10 +1,11 @@
use std::collections::HashMap; use std::collections::HashMap;
use actix_web::{web, HttpResponse, Responder}; use actix_web::{web, HttpResponse};
use semver::Version;
use serde::Deserialize; use serde::Deserialize;
use tantivy::{collector::Count, query::AllQuery, schema::Value, DateTime, Order}; use tantivy::{collector::Count, query::AllQuery, schema::Value, DateTime, Order};
use crate::{error::Error, package::PackageResponse, AppState}; use crate::{error::RegistryError, package::PackageResponse, AppState};
use pesde::{ use pesde::{
names::PackageName, names::PackageName,
source::{ source::{
@ -18,19 +19,20 @@ pub struct Request {
#[serde(default)] #[serde(default)]
query: Option<String>, query: Option<String>,
#[serde(default)] #[serde(default)]
offset: Option<usize>, offset: usize,
} }
pub async fn search_packages( pub async fn search_packages(
app_state: web::Data<AppState>, app_state: web::Data<AppState>,
request: web::Query<Request>, request_query: web::Query<Request>,
) -> Result<impl Responder, Error> { ) -> Result<HttpResponse, RegistryError> {
let searcher = app_state.search_reader.searcher(); let searcher = app_state.search_reader.searcher();
let schema = searcher.schema(); let schema = searcher.schema();
let id = schema.get_field("id").unwrap(); let id = schema.get_field("id").unwrap();
let version = schema.get_field("version").unwrap();
let query = request.query.as_deref().unwrap_or_default().trim(); let query = request_query.query.as_deref().unwrap_or_default().trim();
let query = if query.is_empty() { let query = if query.is_empty() {
Box::new(AllQuery) Box::new(AllQuery)
@ -44,7 +46,7 @@ pub async fn search_packages(
&( &(
Count, Count,
tantivy::collector::TopDocs::with_limit(50) tantivy::collector::TopDocs::with_limit(50)
.and_offset(request.offset.unwrap_or_default()) .and_offset(request_query.offset)
.order_by_fast_field::<DateTime>("published_at", Order::Desc), .order_by_fast_field::<DateTime>("published_at", Order::Desc),
), ),
) )
@ -67,36 +69,25 @@ pub async fn search_packages(
.parse::<PackageName>() .parse::<PackageName>()
.unwrap(); .unwrap();
let (scope, name) = id.as_str(); let (scope, name) = id.as_str();
let version = doc
.get(&version)
.unwrap()
.as_str()
.unwrap()
.parse::<Version>()
.unwrap();
let file: IndexFile = let file: IndexFile =
toml::de::from_str(&read_file(&tree, [scope, name]).unwrap().unwrap()).unwrap(); toml::de::from_str(&read_file(&tree, [scope, name]).unwrap().unwrap()).unwrap();
let (latest_version, entry) = file let version_id = file
.entries .entries
.iter() .keys()
.max_by_key(|(v_id, _)| v_id.version()) .filter(|v_id| *v_id.version() == version)
.max()
.unwrap(); .unwrap();
PackageResponse { PackageResponse::new(&id, version_id, &file)
name: id.to_string(),
version: latest_version.version().to_string(),
targets: file
.entries
.iter()
.filter(|(v_id, _)| v_id.version() == latest_version.version())
.map(|(_, entry)| (&entry.target).into())
.collect(),
description: entry.description.clone().unwrap_or_default(),
published_at: file
.entries
.values()
.map(|entry| entry.published_at)
.max()
.unwrap(),
license: entry.license.clone().unwrap_or_default(),
authors: entry.authors.clone(),
repository: entry.repository.clone().map(|url| url.to_string()),
}
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();

View file

@ -0,0 +1,83 @@
use crate::{
auth::UserId,
error::RegistryError,
git::push_changes,
package::{read_package, read_scope_info},
request_path::AllOrSpecificTarget,
search::search_version_changed,
AppState,
};
use actix_web::{http::Method, web, HttpRequest, HttpResponse};
use pesde::names::PackageName;
use semver::Version;
use std::collections::HashMap;
/// Yanks, or un-yanks, every entry of the given package version that matches
/// the requested target.
///
/// Any routed method other than `DELETE` yanks; `DELETE` un-yanks. With
/// `AllOrSpecificTarget::All`, every target published under the version is
/// affected; with a specific target, only that one.
///
/// Responses: 404 if the scope or package is unknown, 403 if the caller does
/// not own the scope, 409 if no entry actually changed state, 200 on success.
pub async fn yank_package_version(
	request: HttpRequest,
	app_state: web::Data<AppState>,
	path: web::Path<(PackageName, Version, AllOrSpecificTarget)>,
	user_id: web::ReqData<UserId>,
) -> Result<HttpResponse, RegistryError> {
	// DELETE reverses a yank; every other routed method applies one.
	let yanked = request.method() != Method::DELETE;
	let (name, version, target) = path.into_inner();
	let source = app_state.source.lock().await;

	// Only owners of the package's scope may change yank state.
	let Some(scope_info) = read_scope_info(&app_state, name.scope(), &source).await? else {
		return Ok(HttpResponse::NotFound().finish());
	};
	if !scope_info.owners.contains(&user_id.0) {
		return Ok(HttpResponse::Forbidden().finish());
	}

	let Some(mut file) = read_package(&app_state, &name, &source).await? else {
		return Ok(HttpResponse::NotFound().finish());
	};

	// Flip the flag on every matching entry, recording which targets
	// actually changed state.
	let mut targets = vec![];
	for (v_id, entry) in &mut file.entries {
		if *v_id.version() != version {
			continue;
		}
		if let AllOrSpecificTarget::Specific(kind) = target {
			if entry.target.kind() != kind {
				continue;
			}
		}
		if entry.yanked != yanked {
			targets.push(entry.target.kind().to_string());
			entry.yanked = yanked;
		}
	}

	// Nothing changed: the requested state was already in effect.
	if targets.is_empty() {
		return Ok(HttpResponse::Conflict().finish());
	}

	let serialized = toml::to_string(&file)?;
	let prefix = if yanked { "" } else { "un" };
	let target_list = targets.join(", ");

	// Commit the updated index file back to the registry's git index.
	push_changes(
		&app_state,
		&source,
		name.scope().to_string(),
		HashMap::from([(name.name().to_string(), serialized.into_bytes())]),
		format!("{prefix}yank {name}@{version} {target_list}"),
	)
	.await?;

	// Keep the search index in sync with the new yank state.
	search_version_changed(&app_state, &name, &file);

	Ok(HttpResponse::Ok().body(format!("{prefix}yanked {name}@{version} {target_list}")))
}

View file

@ -4,7 +4,7 @@ use serde::Serialize;
use thiserror::Error; use thiserror::Error;
#[derive(Debug, Error)] #[derive(Debug, Error)]
pub enum Error { pub enum RegistryError {
#[error("failed to parse query")] #[error("failed to parse query")]
Query(#[from] tantivy::query::QueryParserError), Query(#[from] tantivy::query::QueryParserError),
@ -53,16 +53,16 @@ pub struct ErrorResponse {
pub error: String, pub error: String,
} }
impl ResponseError for Error { impl ResponseError for RegistryError {
fn error_response(&self) -> HttpResponse<BoxBody> { fn error_response(&self) -> HttpResponse<BoxBody> {
match self { match self {
Error::Query(e) => HttpResponse::BadRequest().json(ErrorResponse { RegistryError::Query(e) => HttpResponse::BadRequest().json(ErrorResponse {
error: format!("failed to parse query: {e}"), error: format!("failed to parse query: {e}"),
}), }),
Error::Tar(_) => HttpResponse::BadRequest().json(ErrorResponse { RegistryError::Tar(_) => HttpResponse::BadRequest().json(ErrorResponse {
error: "corrupt archive".to_string(), error: "corrupt archive".to_string(),
}), }),
Error::InvalidArchive(e) => HttpResponse::BadRequest().json(ErrorResponse { RegistryError::InvalidArchive(e) => HttpResponse::BadRequest().json(ErrorResponse {
error: format!("archive is invalid: {e}"), error: format!("archive is invalid: {e}"),
}), }),
e => { e => {
@ -74,16 +74,16 @@ impl ResponseError for Error {
} }
pub trait ReqwestErrorExt { pub trait ReqwestErrorExt {
async fn into_error(self) -> Result<Self, Error> async fn into_error(self) -> Result<Self, RegistryError>
where where
Self: Sized; Self: Sized;
} }
impl ReqwestErrorExt for reqwest::Response { impl ReqwestErrorExt for reqwest::Response {
async fn into_error(self) -> Result<Self, Error> { async fn into_error(self) -> Result<Self, RegistryError> {
match self.error_for_status_ref() { match self.error_for_status_ref() {
Ok(_) => Ok(self), Ok(_) => Ok(self),
Err(e) => Err(Error::ReqwestResponse(self.text().await?, e)), Err(e) => Err(RegistryError::ReqwestResponse(self.text().await?, e)),
} }
} }
} }

98
registry/src/git.rs Normal file
View file

@ -0,0 +1,98 @@
use crate::{benv, error::RegistryError, AppState};
use git2::{Remote, Repository, Signature};
use pesde::source::{git_index::GitBasedSource, pesde::PesdePackageSource};
use std::collections::HashMap;
use tokio::task::spawn_blocking;
/// Builds the commit signature for registry-authored commits from the
/// `COMMITTER_GIT_NAME` / `COMMITTER_GIT_EMAIL` environment variables.
///
/// Panics if either variable is unset (they are required configuration).
fn signature<'a>() -> Signature<'a> {
    let committer_name = benv!(required "COMMITTER_GIT_NAME");
    let committer_email = benv!(required "COMMITTER_GIT_EMAIL");

    Signature::now(&committer_name, &committer_email).unwrap()
}
/// Resolves the local refspec that maps onto HEAD's upstream branch.
///
/// Looks up the upstream name of the current HEAD, then reverse-transforms
/// it through the matching fetch refspec of `remote` to obtain the local
/// reference name to commit to and push from.
fn get_refspec(repo: &Repository, remote: &mut Remote) -> Result<String, git2::Error> {
    let head = repo.head()?;
    let upstream_buf = repo.branch_upstream_name(head.name().unwrap())?;
    let upstream = upstream_buf.as_str().unwrap();

    let matching_spec = remote
        .refspecs()
        .find(|spec| spec.direction() == git2::Direction::Fetch && spec.dst_matches(upstream))
        .unwrap();

    let local_buf = matching_spec.rtransform(upstream)?;
    Ok(local_buf.as_str().unwrap().to_string())
}
// Git tree-entry filemodes: a regular non-executable blob (0o100644) and a
// subdirectory / tree entry (0o040000), as stored in git tree objects.
const FILE_FILEMODE: i32 = 0o100644;
const DIR_FILEMODE: i32 = 0o040000;
/// Commits `files` into the `directory` subtree of the bare index repository
/// and pushes the result to `origin`.
///
/// Each map entry is written as a blob named by its key inside `directory`;
/// existing entries in that subtree are preserved (the tree builder starts
/// from the old subtree, when one exists). The commit is created on HEAD with
/// `message` and pushed over the refspec derived from HEAD's upstream.
///
/// All git work runs on a blocking thread via `spawn_blocking` since git2 is
/// synchronous.
///
/// Panics if the `COMMITTER_GIT_*` env vars are unset (see `signature`), if
/// the project has no git credentials configured, or if the blocking task
/// itself panics.
pub async fn push_changes(
    app_state: &AppState,
    source: &PesdePackageSource,
    directory: String,
    files: HashMap<String, Vec<u8>>,
    message: String,
) -> Result<(), RegistryError> {
    let path = source.path(&app_state.project);
    // cloned so the 'static blocking closure can own it
    let auth_config = app_state.project.auth_config().clone();

    spawn_blocking(move || {
        let repo = Repository::open_bare(path)?;
        let mut oids = HashMap::new();

        let mut remote = repo.find_remote("origin")?;
        let refspec = get_refspec(&repo, &mut remote)?;

        let reference = repo.find_reference(&refspec)?;

        // write every file's contents as a blob first, keeping the oids
        for (name, contents) in files {
            let oid = repo.blob(&contents)?;
            oids.insert(name, oid);
        }

        let old_root_tree = reference.peel_to_tree()?;
        // the scope's subtree may not exist yet (first package in the scope)
        let old_dir_tree = match old_root_tree.get_name(&directory) {
            Some(entry) => Some(repo.find_tree(entry.id())?),
            None => None,
        };

        // rebuild the scope subtree on top of the old one (or from scratch)
        let mut dir_tree = repo.treebuilder(old_dir_tree.as_ref())?;
        for (file, oid) in oids {
            dir_tree.insert(file, oid, FILE_FILEMODE)?;
        }
        let dir_tree_id = dir_tree.write()?;

        // splice the updated subtree back into a copy of the root tree
        let mut root_tree = repo.treebuilder(Some(&repo.find_tree(old_root_tree.id())?))?;
        root_tree.insert(directory, dir_tree_id, DIR_FILEMODE)?;

        let tree_oid = root_tree.write()?;

        repo.commit(
            Some("HEAD"),
            &signature(),
            &signature(),
            &message,
            &repo.find_tree(tree_oid)?,
            &[&reference.peel_to_commit()?],
        )?;

        let mut push_options = git2::PushOptions::new();
        let mut remote_callbacks = git2::RemoteCallbacks::new();
        let git_creds = auth_config.git_credentials().unwrap();
        remote_callbacks.credentials(|_, _, _| {
            git2::Cred::userpass_plaintext(&git_creds.username, &git_creds.password)
        });
        push_options.remote_callbacks(remote_callbacks);

        remote.push(&[refspec], Some(&mut push_options))?;

        Ok(())
    })
    .await
    .unwrap()
}

View file

@ -29,7 +29,9 @@ use tracing_subscriber::{
mod auth; mod auth;
mod endpoints; mod endpoints;
mod error; mod error;
mod git;
mod package; mod package;
mod request_path;
mod search; mod search;
mod storage; mod storage;
@ -176,12 +178,24 @@ async fn run() -> std::io::Result<()> {
.to(endpoints::package_versions::get_package_versions) .to(endpoints::package_versions::get_package_versions)
.wrap(from_fn(auth::read_mw)), .wrap(from_fn(auth::read_mw)),
) )
.service(
web::resource("/packages/{name}/deprecate")
.put(endpoints::deprecate_version::deprecate_package_version)
.delete(endpoints::deprecate_version::deprecate_package_version)
.wrap(from_fn(auth::write_mw)),
)
.route( .route(
"/packages/{name}/{version}/{target}", "/packages/{name}/{version}/{target}",
web::get() web::get()
.to(endpoints::package_version::get_package_version) .to(endpoints::package_version::get_package_version)
.wrap(from_fn(auth::read_mw)), .wrap(from_fn(auth::read_mw)),
) )
.service(
web::resource("/packages/{name}/{version}/{target}/yank")
.put(endpoints::yank_version::yank_package_version)
.delete(endpoints::yank_version::yank_package_version)
.wrap(from_fn(auth::write_mw)),
)
.service( .service(
web::scope("/packages") web::scope("/packages")
.app_data(PayloadConfig::new(config.max_archive_size)) .app_data(PayloadConfig::new(config.max_archive_size))

View file

@ -1,27 +1,34 @@
use crate::AppState;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use pesde::manifest::target::{Target, TargetKind}; use pesde::{
manifest::{
target::{Target, TargetKind},
DependencyType,
},
names::PackageName,
source::{
git_index::{read_file, root_tree, GitBasedSource},
ids::VersionId,
pesde::{IndexFile, IndexFileEntry, PesdePackageSource, ScopeInfo, SCOPE_INFO_FILE},
specifiers::DependencySpecifiers,
},
};
use semver::Version;
use serde::Serialize; use serde::Serialize;
use std::collections::BTreeSet; use std::collections::{BTreeMap, BTreeSet};
use tokio::task::spawn_blocking;
#[derive(Debug, Serialize, Eq, PartialEq)] #[derive(Debug, Serialize, Eq, PartialEq)]
pub struct TargetInfo { struct TargetInfoInner {
kind: TargetKind,
lib: bool, lib: bool,
bin: bool, bin: bool,
#[serde(skip_serializing_if = "BTreeSet::is_empty")] #[serde(skip_serializing_if = "BTreeSet::is_empty")]
scripts: BTreeSet<String>, scripts: BTreeSet<String>,
} }
impl From<Target> for TargetInfo { impl TargetInfoInner {
fn from(target: Target) -> Self { fn new(target: &Target) -> Self {
(&target).into() TargetInfoInner {
}
}
impl From<&Target> for TargetInfo {
fn from(target: &Target) -> Self {
TargetInfo {
kind: target.kind(),
lib: target.lib_path().is_some(), lib: target.lib_path().is_some(),
bin: target.bin_path().is_some(), bin: target.bin_path().is_some(),
scripts: target scripts: target
@ -32,6 +39,25 @@ impl From<&Target> for TargetInfo {
} }
} }
/// Serialized target details for one (version, target) index entry,
/// including whether that particular entry has been yanked.
#[derive(Debug, Serialize, Eq, PartialEq)]
pub struct TargetInfo {
    kind: TargetKind,
    // omitted from the JSON output when false
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    yanked: bool,
    #[serde(flatten)]
    inner: TargetInfoInner,
}

impl TargetInfo {
    /// Combines a target's own details with its entry's yank status.
    fn new(target: &Target, yanked: bool) -> Self {
        TargetInfo {
            kind: target.kind(),
            yanked,
            inner: TargetInfoInner::new(target),
        }
    }
}
impl Ord for TargetInfo { impl Ord for TargetInfo {
fn cmp(&self, other: &Self) -> std::cmp::Ordering { fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.kind.cmp(&other.kind) self.kind.cmp(&other.kind)
@ -44,18 +70,199 @@ impl PartialOrd for TargetInfo {
} }
} }
/// Serialized form of a documentation sidebar entry: either a single page or
/// a category of nested entries.
///
/// `untagged`, so the emitted JSON is distinguished by field shape
/// (`name` vs `items`/`collapsed`) rather than a tag.
#[derive(Debug, Serialize, Ord, PartialOrd, Eq, PartialEq)]
#[serde(untagged)]
pub enum RegistryDocEntryKind {
    Page {
        name: String,
    },
    Category {
        // NOTE: the previous `#[serde(default, ...)]` had its `default`
        // removed — this type only derives Serialize, and serde's `default`
        // attribute is honored exclusively during deserialization.
        #[serde(skip_serializing_if = "BTreeSet::is_empty")]
        items: BTreeSet<RegistryDocEntry>,
        #[serde(skip_serializing_if = "std::ops::Not::not")]
        collapsed: bool,
    },
}
/// One documentation sidebar entry as exposed by the registry API: a label,
/// an optional ordering position, and page-or-category data flattened in.
#[derive(Debug, Serialize, Ord, PartialOrd, Eq, PartialEq)]
pub struct RegistryDocEntry {
    label: String,
    // NOTE: `default` dropped from the serde attribute — this type only
    // derives Serialize, and `default` only affects deserialization.
    #[serde(skip_serializing_if = "Option::is_none")]
    position: Option<usize>,
    #[serde(flatten)]
    kind: RegistryDocEntryKind,
}
impl From<pesde::source::pesde::DocEntry> for RegistryDocEntry {
    /// Converts an index-file doc entry into its API response shape,
    /// recursing through category items.
    fn from(entry: pesde::source::pesde::DocEntry) -> Self {
        use pesde::source::pesde::DocEntryKind;

        let kind = match entry.kind {
            DocEntryKind::Page { name, .. } => RegistryDocEntryKind::Page { name },
            DocEntryKind::Category { items, collapsed } => RegistryDocEntryKind::Category {
                items: items.into_iter().map(Into::into).collect(),
                collapsed,
            },
        };

        Self {
            label: entry.label,
            position: entry.position,
            kind,
        }
    }
}
/// Metadata shared between the single-version and version-list responses;
/// flattened into the surrounding response object when serialized.
#[derive(Debug, Serialize)]
pub struct PackageResponseInner {
    published_at: DateTime<Utc>,
    #[serde(skip_serializing_if = "String::is_empty")]
    license: String,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    authors: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    repository: Option<String>,
    #[serde(skip_serializing_if = "BTreeSet::is_empty")]
    docs: BTreeSet<RegistryDocEntry>,
    #[serde(skip_serializing_if = "BTreeMap::is_empty")]
    dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
}
impl PackageResponseInner {
    /// Copies the response-relevant metadata out of a single index entry.
    pub fn new(entry: &IndexFileEntry) -> Self {
        PackageResponseInner {
            published_at: entry.published_at,
            license: entry.license.clone().unwrap_or_default(),
            authors: entry.authors.clone(),
            repository: entry.repository.clone().map(|url| url.to_string()),
            docs: entry.docs.iter().cloned().map(Into::into).collect(),
            dependencies: entry.dependencies.clone(),
        }
    }
}
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
pub struct PackageResponse { pub struct PackageResponse {
pub name: String, name: String,
pub version: String, version: String,
pub targets: BTreeSet<TargetInfo>, targets: BTreeSet<TargetInfo>,
#[serde(skip_serializing_if = "String::is_empty")] #[serde(skip_serializing_if = "String::is_empty")]
pub description: String, description: String,
pub published_at: DateTime<Utc>,
#[serde(skip_serializing_if = "String::is_empty")] #[serde(skip_serializing_if = "String::is_empty")]
pub license: String, deprecated: String,
#[serde(skip_serializing_if = "Vec::is_empty")] #[serde(flatten)]
pub authors: Vec<String>, inner: PackageResponseInner,
#[serde(skip_serializing_if = "Option::is_none")] }
pub repository: Option<String>,
impl PackageResponse {
    /// Builds the API response for one version of `name`.
    ///
    /// Panics if `version_id` is not present in `file.entries` — callers must
    /// have already resolved the version against this index file.
    pub fn new(name: &PackageName, version_id: &VersionId, file: &IndexFile) -> Self {
        let entry = file.entries.get(version_id).unwrap();

        PackageResponse {
            name: name.to_string(),
            version: version_id.version().to_string(),
            // one TargetInfo for every entry sharing this semver version
            // (same version published for multiple target kinds)
            targets: file
                .entries
                .iter()
                .filter(|(ver, _)| ver.version() == version_id.version())
                .map(|(_, entry)| TargetInfo::new(&entry.target, entry.yanked))
                .collect(),
            description: entry.description.clone().unwrap_or_default(),
            deprecated: file.meta.deprecated.clone(),
            inner: PackageResponseInner::new(entry),
        }
    }
}
/// Per-target record inside a version listing: the target's details, its
/// yank status, and the shared metadata flattened in.
#[derive(Debug, Serialize)]
struct PackageVersionsResponseVersionInner {
    target: TargetInfoInner,
    // omitted from the JSON output when false
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    yanked: bool,
    #[serde(flatten)]
    inner: PackageResponseInner,
}
/// All targets published under a single semver version, keyed by target kind.
#[derive(Debug, Serialize, Default)]
struct PackageVersionsResponseVersion {
    #[serde(skip_serializing_if = "String::is_empty")]
    description: String,
    targets: BTreeMap<TargetKind, PackageVersionsResponseVersionInner>,
}
/// Response body listing every published version of a package.
#[derive(Debug, Serialize)]
pub struct PackageVersionsResponse {
    name: String,
    // non-empty when the package is deprecated; holds the deprecation message
    #[serde(skip_serializing_if = "String::is_empty")]
    deprecated: String,
    versions: BTreeMap<Version, PackageVersionsResponseVersion>,
}
impl PackageVersionsResponse {
    /// Builds the full version listing for `name` from its index file,
    /// grouping entries by semver version with one record per target.
    pub fn new(name: &PackageName, file: &IndexFile) -> Self {
        let mut versions = BTreeMap::<Version, PackageVersionsResponseVersion>::new();

        for (v_id, entry) in file.entries.iter() {
            let versions_resp = versions.entry(v_id.version().clone()).or_default();

            // overwritten once per target of the same version; per the
            // publishing docs, targets sharing a version must share a
            // description, so the last write is representative
            versions_resp.description = entry.description.clone().unwrap_or_default();
            versions_resp.targets.insert(
                entry.target.kind(),
                PackageVersionsResponseVersionInner {
                    target: TargetInfoInner::new(&entry.target),
                    yanked: entry.yanked,
                    inner: PackageResponseInner::new(entry),
                },
            );
        }

        PackageVersionsResponse {
            name: name.to_string(),
            deprecated: file.meta.deprecated.clone(),
            versions,
        }
    }
}
/// Reads a package's index file from the git index.
///
/// Returns `Ok(None)` when the package has no file in the index; parses the
/// TOML contents otherwise. The git access runs on a blocking thread.
pub async fn read_package(
    app_state: &AppState,
    package: &PackageName,
    source: &PesdePackageSource,
) -> Result<Option<IndexFile>, crate::error::RegistryError> {
    let index_path = source.path(&app_state.project);
    // owned copy for the 'static blocking closure
    let pkg = package.clone();

    spawn_blocking(move || {
        let repo = gix::open(index_path)?;
        let tree = root_tree(&repo)?;
        let (scope, name) = pkg.as_str();

        match read_file(&tree, [scope, name])? {
            Some(contents) => toml::de::from_str(&contents).map_err(Into::into),
            None => Ok(None),
        }
    })
    .await
    .unwrap()
}
pub async fn read_scope_info(
app_state: &AppState,
scope: &str,
source: &PesdePackageSource,
) -> Result<Option<ScopeInfo>, crate::error::RegistryError> {
let path = source.path(&app_state.project);
let scope = scope.to_string();
spawn_blocking(move || {
let repo = gix::open(path)?;
let tree = root_tree(&repo)?;
let Some(versions) = read_file(&tree, [&*scope, SCOPE_INFO_FILE])? else {
return Ok(None);
};
toml::de::from_str(&versions).map_err(Into::into)
})
.await
.unwrap()
} }

View file

@ -0,0 +1,69 @@
use pesde::manifest::target::TargetKind;
use semver::Version;
use serde::{Deserialize, Deserializer};
/// Version selector parsed from a request path segment: the literal
/// `"latest"` (case-insensitive) or a concrete semver version.
#[derive(Debug)]
pub enum LatestOrSpecificVersion {
    Latest,
    Specific(Version),
}

impl<'de> Deserialize<'de> for LatestOrSpecificVersion {
    fn deserialize<D>(deserializer: D) -> Result<LatestOrSpecificVersion, D::Error>
    where
        D: Deserializer<'de>,
    {
        let raw = String::deserialize(deserializer)?;

        if raw.eq_ignore_ascii_case("latest") {
            Ok(LatestOrSpecificVersion::Latest)
        } else {
            raw.parse()
                .map(LatestOrSpecificVersion::Specific)
                .map_err(serde::de::Error::custom)
        }
    }
}
/// Target selector parsed from a request path segment: the literal `"any"`
/// (case-insensitive) or a concrete target kind.
#[derive(Debug)]
pub enum AnyOrSpecificTarget {
    Any,
    Specific(TargetKind),
}

impl<'de> Deserialize<'de> for AnyOrSpecificTarget {
    fn deserialize<D>(deserializer: D) -> Result<AnyOrSpecificTarget, D::Error>
    where
        D: Deserializer<'de>,
    {
        let raw = String::deserialize(deserializer)?;

        if raw.eq_ignore_ascii_case("any") {
            Ok(AnyOrSpecificTarget::Any)
        } else {
            raw.parse()
                .map(AnyOrSpecificTarget::Specific)
                .map_err(serde::de::Error::custom)
        }
    }
}
/// Target selector parsed from a request path segment: the literal `"all"`
/// (case-insensitive) or a concrete target kind.
#[derive(Debug)]
pub enum AllOrSpecificTarget {
    All,
    Specific(TargetKind),
}

impl<'de> Deserialize<'de> for AllOrSpecificTarget {
    fn deserialize<D>(deserializer: D) -> Result<AllOrSpecificTarget, D::Error>
    where
        D: Deserializer<'de>,
    {
        let raw = String::deserialize(deserializer)?;

        if raw.eq_ignore_ascii_case("all") {
            Ok(AllOrSpecificTarget::All)
        } else {
            raw.parse()
                .map(AllOrSpecificTarget::Specific)
                .map_err(serde::de::Error::custom)
        }
    }
}

View file

@ -5,10 +5,12 @@ use pesde::{
names::PackageName, names::PackageName,
source::{ source::{
git_index::{root_tree, GitBasedSource}, git_index::{root_tree, GitBasedSource},
ids::VersionId,
pesde::{IndexFile, IndexFileEntry, PesdePackageSource, SCOPE_INFO_FILE}, pesde::{IndexFile, IndexFileEntry, PesdePackageSource, SCOPE_INFO_FILE},
}, },
Project, Project,
}; };
use semver::Version;
use tantivy::{ use tantivy::{
doc, doc,
query::QueryParser, query::QueryParser,
@ -18,7 +20,7 @@ use tantivy::{
}; };
use tokio::pin; use tokio::pin;
pub async fn all_packages( async fn all_packages(
source: &PesdePackageSource, source: &PesdePackageSource,
project: &Project, project: &Project,
) -> impl Stream<Item = (PackageName, IndexFile)> { ) -> impl Stream<Item = (PackageName, IndexFile)> {
@ -67,6 +69,18 @@ pub async fn all_packages(
} }
} }
/// Picks the entry to surface in search: the greatest non-yanked version,
/// breaking version ties by the most recent publish time.
fn find_max(file: &IndexFile) -> Option<(&VersionId, &IndexFileEntry)> {
    let candidates = file.entries.iter().filter(|(_, entry)| !entry.yanked);

    candidates.max_by(|(id_a, entry_a), (id_b, entry_b)| {
        let by_version = id_a.version().cmp(id_b.version());
        by_version.then_with(|| entry_a.published_at.cmp(&entry_b.published_at))
    })
}
pub async fn make_search( pub async fn make_search(
project: &Project, project: &Project,
source: &PesdePackageSource, source: &PesdePackageSource,
@ -80,6 +94,8 @@ pub async fn make_search(
); );
let id_field = schema_builder.add_text_field("id", STRING | STORED); let id_field = schema_builder.add_text_field("id", STRING | STORED);
let version = schema_builder.add_text_field("version", STRING | STORED);
let scope = schema_builder.add_text_field("scope", field_options.clone()); let scope = schema_builder.add_text_field("scope", field_options.clone());
let name = schema_builder.add_text_field("name", field_options.clone()); let name = schema_builder.add_text_field("name", field_options.clone());
let description = schema_builder.add_text_field("description", field_options); let description = schema_builder.add_text_field("description", field_options);
@ -103,18 +119,22 @@ pub async fn make_search(
let stream = all_packages(source, project).await; let stream = all_packages(source, project).await;
pin!(stream); pin!(stream);
while let Some((pkg_name, mut file)) = stream.next().await { while let Some((pkg_name, file)) = stream.next().await {
let Some((_, latest_entry)) = file.entries.pop_last() else { if !file.meta.deprecated.is_empty() {
tracing::error!("no versions found for {pkg_name}"); continue;
}
let Some((v_id, latest_entry)) = find_max(&file) else {
continue; continue;
}; };
search_writer search_writer
.add_document(doc!( .add_document(doc!(
id_field => pkg_name.to_string(), id_field => pkg_name.to_string(),
version => v_id.version().to_string(),
scope => pkg_name.as_str().0, scope => pkg_name.as_str().0,
name => pkg_name.as_str().1, name => pkg_name.as_str().1,
description => latest_entry.description.unwrap_or_default(), description => latest_entry.description.clone().unwrap_or_default(),
published_at => DateTime::from_timestamp_secs(latest_entry.published_at.timestamp()), published_at => DateTime::from_timestamp_secs(latest_entry.published_at.timestamp()),
)) ))
.unwrap(); .unwrap();
@ -130,7 +150,12 @@ pub async fn make_search(
(search_reader, search_writer, query_parser) (search_reader, search_writer, query_parser)
} }
pub fn update_version(app_state: &AppState, name: &PackageName, entry: IndexFileEntry) { pub fn update_search_version(
app_state: &AppState,
name: &PackageName,
version: &Version,
entry: &IndexFileEntry,
) {
let mut search_writer = app_state.search_writer.lock().unwrap(); let mut search_writer = app_state.search_writer.lock().unwrap();
let schema = search_writer.index().schema(); let schema = search_writer.index().schema();
let id_field = schema.get_field("id").unwrap(); let id_field = schema.get_field("id").unwrap();
@ -139,12 +164,35 @@ pub fn update_version(app_state: &AppState, name: &PackageName, entry: IndexFile
search_writer.add_document(doc!( search_writer.add_document(doc!(
id_field => name.to_string(), id_field => name.to_string(),
schema.get_field("version").unwrap() => version.to_string(),
schema.get_field("scope").unwrap() => name.as_str().0, schema.get_field("scope").unwrap() => name.as_str().0,
schema.get_field("name").unwrap() => name.as_str().1, schema.get_field("name").unwrap() => name.as_str().1,
schema.get_field("description").unwrap() => entry.description.unwrap_or_default(), schema.get_field("description").unwrap() => entry.description.clone().unwrap_or_default(),
schema.get_field("published_at").unwrap() => DateTime::from_timestamp_secs(entry.published_at.timestamp()) schema.get_field("published_at").unwrap() => DateTime::from_timestamp_secs(entry.published_at.timestamp())
)).unwrap(); )).unwrap();
search_writer.commit().unwrap(); search_writer.commit().unwrap();
app_state.search_reader.reload().unwrap(); app_state.search_reader.reload().unwrap();
} }
/// Re-synchronizes the search index after a package's index file changed.
///
/// Deprecated packages (and packages whose versions are all yanked) are
/// removed from search entirely; otherwise the best remaining version
/// (per `find_max`) replaces the package's search document.
pub fn search_version_changed(app_state: &AppState, name: &PackageName, file: &IndexFile) {
    let latest = if file.meta.deprecated.is_empty() {
        find_max(file)
    } else {
        None
    };

    match latest {
        Some((v_id, entry)) => update_search_version(app_state, name, v_id.version(), entry),
        None => {
            // nothing left to index: drop the package's document
            let mut search_writer = app_state.search_writer.lock().unwrap();
            let id_field = search_writer.index().schema().get_field("id").unwrap();

            search_writer.delete_term(Term::from_field_text(id_field, &name.to_string()));
            search_writer.commit().unwrap();
            app_state.search_reader.reload().unwrap();
        }
    }
}

View file

@ -1,4 +1,4 @@
use crate::{error::Error, storage::StorageImpl}; use crate::{error::RegistryError, storage::StorageImpl};
use actix_web::{ use actix_web::{
http::header::{CONTENT_ENCODING, CONTENT_TYPE}, http::header::{CONTENT_ENCODING, CONTENT_TYPE},
HttpResponse, HttpResponse,
@ -15,7 +15,10 @@ pub struct FSStorage {
pub root: PathBuf, pub root: PathBuf,
} }
async fn read_file_to_response(path: &Path, content_type: &str) -> Result<HttpResponse, Error> { async fn read_file_to_response(
path: &Path,
content_type: &str,
) -> Result<HttpResponse, RegistryError> {
Ok(match fs::read(path).await { Ok(match fs::read(path).await {
Ok(contents) => HttpResponse::Ok() Ok(contents) => HttpResponse::Ok()
.append_header((CONTENT_TYPE, content_type)) .append_header((CONTENT_TYPE, content_type))
@ -32,7 +35,7 @@ impl StorageImpl for FSStorage {
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
contents: Vec<u8>, contents: Vec<u8>,
) -> Result<(), Error> { ) -> Result<(), RegistryError> {
let (scope, name) = package_name.as_str(); let (scope, name) = package_name.as_str();
let path = self let path = self
@ -52,7 +55,7 @@ impl StorageImpl for FSStorage {
&self, &self,
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, RegistryError> {
let (scope, name) = package_name.as_str(); let (scope, name) = package_name.as_str();
let path = self let path = self
@ -70,7 +73,7 @@ impl StorageImpl for FSStorage {
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
contents: Vec<u8>, contents: Vec<u8>,
) -> Result<(), Error> { ) -> Result<(), RegistryError> {
let (scope, name) = package_name.as_str(); let (scope, name) = package_name.as_str();
let path = self let path = self
@ -90,7 +93,7 @@ impl StorageImpl for FSStorage {
&self, &self,
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, RegistryError> {
let (scope, name) = package_name.as_str(); let (scope, name) = package_name.as_str();
let path = self let path = self
@ -103,7 +106,7 @@ impl StorageImpl for FSStorage {
read_file_to_response(&path.join("readme.gz"), "text/plain").await read_file_to_response(&path.join("readme.gz"), "text/plain").await
} }
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> { async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError> {
let path = self.root.join("Doc"); let path = self.root.join("Doc");
fs::create_dir_all(&path).await?; fs::create_dir_all(&path).await?;
@ -112,7 +115,7 @@ impl StorageImpl for FSStorage {
Ok(()) Ok(())
} }
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> { async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError> {
let path = self.root.join("Doc"); let path = self.root.join("Doc");
read_file_to_response(&path.join(format!("{doc_hash}.gz")), "text/plain").await read_file_to_response(&path.join(format!("{doc_hash}.gz")), "text/plain").await

View file

@ -1,4 +1,4 @@
use crate::{benv, error::Error, make_reqwest}; use crate::{benv, error::RegistryError, make_reqwest};
use actix_web::HttpResponse; use actix_web::HttpResponse;
use pesde::{names::PackageName, source::ids::VersionId}; use pesde::{names::PackageName, source::ids::VersionId};
use rusty_s3::{Bucket, Credentials, UrlStyle}; use rusty_s3::{Bucket, Credentials, UrlStyle};
@ -19,31 +19,31 @@ pub trait StorageImpl: Display {
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
contents: Vec<u8>, contents: Vec<u8>,
) -> Result<(), crate::error::Error>; ) -> Result<(), crate::error::RegistryError>;
async fn get_package( async fn get_package(
&self, &self,
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
) -> Result<HttpResponse, crate::error::Error>; ) -> Result<HttpResponse, crate::error::RegistryError>;
async fn store_readme( async fn store_readme(
&self, &self,
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
contents: Vec<u8>, contents: Vec<u8>,
) -> Result<(), crate::error::Error>; ) -> Result<(), crate::error::RegistryError>;
async fn get_readme( async fn get_readme(
&self, &self,
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
) -> Result<HttpResponse, crate::error::Error>; ) -> Result<HttpResponse, crate::error::RegistryError>;
async fn store_doc( async fn store_doc(
&self, &self,
doc_hash: String, doc_hash: String,
contents: Vec<u8>, contents: Vec<u8>,
) -> Result<(), crate::error::Error>; ) -> Result<(), crate::error::RegistryError>;
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, crate::error::Error>; async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, crate::error::RegistryError>;
} }
impl StorageImpl for Storage { impl StorageImpl for Storage {
@ -52,7 +52,7 @@ impl StorageImpl for Storage {
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
contents: Vec<u8>, contents: Vec<u8>,
) -> Result<(), Error> { ) -> Result<(), RegistryError> {
match self { match self {
Storage::S3(s3) => s3.store_package(package_name, version, contents).await, Storage::S3(s3) => s3.store_package(package_name, version, contents).await,
Storage::FS(fs) => fs.store_package(package_name, version, contents).await, Storage::FS(fs) => fs.store_package(package_name, version, contents).await,
@ -63,7 +63,7 @@ impl StorageImpl for Storage {
&self, &self,
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, RegistryError> {
match self { match self {
Storage::S3(s3) => s3.get_package(package_name, version).await, Storage::S3(s3) => s3.get_package(package_name, version).await,
Storage::FS(fs) => fs.get_package(package_name, version).await, Storage::FS(fs) => fs.get_package(package_name, version).await,
@ -75,7 +75,7 @@ impl StorageImpl for Storage {
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
contents: Vec<u8>, contents: Vec<u8>,
) -> Result<(), Error> { ) -> Result<(), RegistryError> {
match self { match self {
Storage::S3(s3) => s3.store_readme(package_name, version, contents).await, Storage::S3(s3) => s3.store_readme(package_name, version, contents).await,
Storage::FS(fs) => fs.store_readme(package_name, version, contents).await, Storage::FS(fs) => fs.store_readme(package_name, version, contents).await,
@ -86,21 +86,21 @@ impl StorageImpl for Storage {
&self, &self,
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, RegistryError> {
match self { match self {
Storage::S3(s3) => s3.get_readme(package_name, version).await, Storage::S3(s3) => s3.get_readme(package_name, version).await,
Storage::FS(fs) => fs.get_readme(package_name, version).await, Storage::FS(fs) => fs.get_readme(package_name, version).await,
} }
} }
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> { async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError> {
match self { match self {
Storage::S3(s3) => s3.store_doc(doc_hash, contents).await, Storage::S3(s3) => s3.store_doc(doc_hash, contents).await,
Storage::FS(fs) => fs.store_doc(doc_hash, contents).await, Storage::FS(fs) => fs.store_doc(doc_hash, contents).await,
} }
} }
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> { async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError> {
match self { match self {
Storage::S3(s3) => s3.get_doc(doc_hash).await, Storage::S3(s3) => s3.get_doc(doc_hash).await,
Storage::FS(fs) => fs.get_doc(doc_hash).await, Storage::FS(fs) => fs.get_doc(doc_hash).await,

View file

@ -1,5 +1,5 @@
use crate::{ use crate::{
error::{Error, ReqwestErrorExt}, error::{RegistryError, ReqwestErrorExt},
storage::StorageImpl, storage::StorageImpl,
}; };
use actix_web::{http::header::LOCATION, HttpResponse}; use actix_web::{http::header::LOCATION, HttpResponse};
@ -26,7 +26,7 @@ impl StorageImpl for S3Storage {
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
contents: Vec<u8>, contents: Vec<u8>,
) -> Result<(), Error> { ) -> Result<(), RegistryError> {
let object_url = PutObject::new( let object_url = PutObject::new(
&self.s3_bucket, &self.s3_bucket,
Some(&self.s3_credentials), Some(&self.s3_credentials),
@ -55,7 +55,7 @@ impl StorageImpl for S3Storage {
&self, &self,
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, RegistryError> {
let object_url = GetObject::new( let object_url = GetObject::new(
&self.s3_bucket, &self.s3_bucket,
Some(&self.s3_credentials), Some(&self.s3_credentials),
@ -77,7 +77,7 @@ impl StorageImpl for S3Storage {
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
contents: Vec<u8>, contents: Vec<u8>,
) -> Result<(), Error> { ) -> Result<(), RegistryError> {
let object_url = PutObject::new( let object_url = PutObject::new(
&self.s3_bucket, &self.s3_bucket,
Some(&self.s3_credentials), Some(&self.s3_credentials),
@ -106,7 +106,7 @@ impl StorageImpl for S3Storage {
&self, &self,
package_name: &PackageName, package_name: &PackageName,
version: &VersionId, version: &VersionId,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, RegistryError> {
let object_url = GetObject::new( let object_url = GetObject::new(
&self.s3_bucket, &self.s3_bucket,
Some(&self.s3_credentials), Some(&self.s3_credentials),
@ -123,7 +123,7 @@ impl StorageImpl for S3Storage {
.finish()) .finish())
} }
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> { async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError> {
let object_url = PutObject::new( let object_url = PutObject::new(
&self.s3_bucket, &self.s3_bucket,
Some(&self.s3_credentials), Some(&self.s3_credentials),
@ -145,7 +145,7 @@ impl StorageImpl for S3Storage {
Ok(()) Ok(())
} }
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> { async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError> {
let object_url = GetObject::new( let object_url = GetObject::new(
&self.s3_bucket, &self.s3_bucket,
Some(&self.s3_credentials), Some(&self.s3_credentials),

View file

@ -1,7 +1,6 @@
use crate::cli::config::read_config; use crate::cli::get_index;
use anyhow::Context;
use clap::{Args, Subcommand}; use clap::{Args, Subcommand};
use pesde::{errors::ManifestReadError, Project, DEFAULT_INDEX_NAME}; use pesde::Project;
mod login; mod login;
mod logout; mod logout;
@ -33,37 +32,7 @@ pub enum AuthCommands {
impl AuthSubcommand { impl AuthSubcommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> { pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let manifest = match project.deser_manifest().await { let index_url = get_index(&project, self.index.as_deref()).await?;
Ok(manifest) => Some(manifest),
Err(e) => match e {
ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None,
e => return Err(e.into()),
},
};
let index_url = match self.index.as_deref() {
Some(index) => match index.try_into() {
Ok(url) => Some(url),
Err(_) => None,
},
None => match manifest {
Some(_) => None,
None => Some(read_config().await?.default_index),
},
};
let index_url = match index_url {
Some(url) => url,
None => {
let index_name = self.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
manifest
.unwrap()
.indices
.remove(index_name)
.with_context(|| format!("index {index_name} not found in manifest"))?
}
};
match self.command { match self.command {
AuthCommands::Login(login) => login.run(index_url, project, reqwest).await, AuthCommands::Login(login) => login.run(index_url, project, reqwest).await,

View file

@ -0,0 +1,111 @@
use crate::cli::get_index;
use anyhow::Context;
use clap::Args;
use colored::Colorize;
use pesde::{
names::PackageName,
source::{
pesde::PesdePackageSource,
traits::{PackageSource, RefreshOptions},
},
Project,
};
use reqwest::{header::AUTHORIZATION, Method, StatusCode};
/// Arguments for `pesde deprecate`: marks (or un-marks) an entire package as
/// deprecated in a pesde index.
#[derive(Debug, Args)]
pub struct DeprecateCommand {
    /// Whether to undeprecate the package
    #[clap(long)]
    undo: bool,

    /// The index to deprecate the package in
    #[clap(short, long)]
    index: Option<String>,

    /// The package to deprecate
    #[clap(index = 1)]
    package: PackageName,

    /// The reason for deprecating the package
    // required unless `--undo` is passed: undeprecating needs no reason
    #[clap(index = 2, required_unless_present = "undo")]
    reason: Option<String>,
}
impl DeprecateCommand {
    /// Sends a `PUT` (deprecate) or `DELETE` (undeprecate) request to the
    /// registry API of the resolved index for `self.package`.
    ///
    /// # Errors
    /// Fails when the index cannot be resolved or refreshed, when deprecating
    /// without a non-empty reason, or when the HTTP request itself fails
    /// (non-success statuses other than 409/403 also produce an error).
    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
        // Resolve which index to talk to (CLI flag, manifest, or config default).
        let index_url = get_index(&project, self.index.as_deref()).await?;
        let source = PesdePackageSource::new(index_url.clone());
        source
            .refresh(&RefreshOptions {
                project: project.clone(),
            })
            .await
            .context("failed to refresh source")?;
        let config = source
            .config(&project)
            .await
            .context("failed to get index config")?;

        let mut request = reqwest.request(
            // DELETE undoes a deprecation; PUT sets one.
            if self.undo {
                Method::DELETE
            } else {
                Method::PUT
            },
            format!(
                "{}/v0/packages/{}/deprecate",
                config.api(),
                urlencoding::encode(&self.package.to_string()),
            ),
        );

        if !self.undo {
            // A deprecation must carry a human-readable reason; reject
            // empty/whitespace-only input before sending the request.
            // (Error message previously read "non-empty a reason".)
            request = request.body(
                self.reason
                    .map(|reason| reason.trim().to_string())
                    .filter(|reason| !reason.is_empty())
                    .context("deprecating must have a non-empty reason")?,
            );
        }

        // Authenticate with the token stored for this index, if any.
        if let Some(token) = project.auth_config().tokens().get(&index_url) {
            tracing::debug!("using token for {index_url}");
            request = request.header(AUTHORIZATION, token);
        }

        let response = request.send().await.context("failed to send request")?;
        let status = response.status();
        let text = response
            .text()
            .await
            .context("failed to get response text")?;
        // Message prefix: "deprecate" vs "undeprecate" depending on the operation.
        let prefix = if self.undo { "un" } else { "" };
        match status {
            StatusCode::CONFLICT => {
                println!(
                    "{}",
                    format!("version is already {prefix}deprecated")
                        .red()
                        .bold()
                );
            }
            StatusCode::FORBIDDEN => {
                println!(
                    "{}",
                    format!("unauthorized to {prefix}deprecate under this scope")
                        .red()
                        .bold()
                );
            }
            code if !code.is_success() => {
                anyhow::bail!("failed to {prefix}deprecate package: {code} ({text})");
            }
            _ => {
                println!("{text}");
            }
        }

        Ok(())
    }
}

View file

@ -3,6 +3,7 @@ use pesde::Project;
mod add; mod add;
mod auth; mod auth;
mod config; mod config;
mod deprecate;
mod execute; mod execute;
mod init; mod init;
mod install; mod install;
@ -18,6 +19,7 @@ mod self_install;
#[cfg(feature = "version-management")] #[cfg(feature = "version-management")]
mod self_upgrade; mod self_upgrade;
mod update; mod update;
mod yank;
#[derive(Debug, clap::Subcommand)] #[derive(Debug, clap::Subcommand)]
pub enum Subcommand { pub enum Subcommand {
@ -68,6 +70,12 @@ pub enum Subcommand {
/// Executes a binary package without needing to be run in a project directory /// Executes a binary package without needing to be run in a project directory
#[clap(name = "x", visible_alias = "execute", visible_alias = "exec")] #[clap(name = "x", visible_alias = "execute", visible_alias = "exec")]
Execute(execute::ExecuteCommand), Execute(execute::ExecuteCommand),
/// Yanks a package from the registry
Yank(yank::YankCommand),
/// Deprecates a package from the registry
Deprecate(deprecate::DeprecateCommand),
} }
impl Subcommand { impl Subcommand {
@ -91,6 +99,8 @@ impl Subcommand {
Subcommand::Update(update) => update.run(project, reqwest).await, Subcommand::Update(update) => update.run(project, reqwest).await,
Subcommand::Outdated(outdated) => outdated.run(project).await, Subcommand::Outdated(outdated) => outdated.run(project).await,
Subcommand::Execute(execute) => execute.run(project, reqwest).await, Subcommand::Execute(execute) => execute.run(project, reqwest).await,
Subcommand::Yank(yank) => yank.run(project, reqwest).await,
Subcommand::Deprecate(deprecate) => deprecate.run(project, reqwest).await,
} }
} }
} }

157
src/cli/commands/yank.rs Normal file
View file

@ -0,0 +1,157 @@
use crate::cli::get_index;
use anyhow::Context;
use clap::Args;
use colored::Colorize;
use pesde::{
manifest::target::TargetKind,
names::PackageName,
source::{
pesde::PesdePackageSource,
traits::{PackageSource, RefreshOptions},
},
Project,
};
use reqwest::{header::AUTHORIZATION, Method, StatusCode};
use semver::Version;
use std::{fmt::Display, str::FromStr};
/// A target selector for yanking: either every target of a version, or one
/// specific target kind.
#[derive(Debug, Clone)]
enum TargetKindOrAll {
    // all targets of the version
    All,
    // a single target kind
    Specific(TargetKind),
}
impl Display for TargetKindOrAll {
    /// Formats as the literal `all`, or as the specific target kind's own
    /// display form.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if let TargetKindOrAll::Specific(kind) = self {
            write!(f, "{kind}")
        } else {
            f.write_str("all")
        }
    }
}
impl FromStr for TargetKindOrAll {
    type Err = anyhow::Error;

    /// Parses `"all"` (case-insensitively) into [`TargetKindOrAll::All`];
    /// any other input is delegated to `TargetKind`'s own parser.
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        if input.eq_ignore_ascii_case("all") {
            Ok(TargetKindOrAll::All)
        } else {
            input
                .parse()
                .map(TargetKindOrAll::Specific)
                .context("failed to parse target kind")
        }
    }
}
// A fully-qualified yank target parsed from `scope/name@version[ target]`:
// package name, version, and target kind (or all targets).
#[derive(Debug, Clone)]
struct YankId(PackageName, Version, TargetKindOrAll);
impl FromStr for YankId {
    type Err = anyhow::Error;

    /// Parses `scope/name@version` or `scope/name@version target`.
    ///
    /// The version and the optional target are separated *before* the version
    /// is parsed. Previously the target suffix was left attached to the
    /// version string (`"1.0.0 luau"`), which semver rejects, so every
    /// target-qualified yank id failed to parse.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let (package, rest) = s
            .split_once('@')
            .context("package is not in format of `scope/name@version target`")?;

        // `rest` is the version, optionally followed by a space and a target.
        let (version, target) = match rest.split_once(' ') {
            Some((version, target)) => (
                version,
                target
                    .parse()
                    .context("package is not in format of `scope/name@version target`")?,
            ),
            None => (rest, TargetKindOrAll::All),
        };

        Ok(YankId(
            package.parse().context("failed to parse package name")?,
            version.parse().context("failed to parse version")?,
            target,
        ))
    }
}
/// Arguments for `pesde yank`: marks (or un-marks) one version/target of a
/// package as yanked in a pesde index.
#[derive(Debug, Args)]
pub struct YankCommand {
    /// Whether to unyank the package
    #[clap(long)]
    undo: bool,

    /// The index to yank the package from
    #[clap(short, long)]
    index: Option<String>,

    /// The package to yank
    // parsed from `scope/name@version[ target]` via `YankId::from_str`
    #[clap(index = 1)]
    package: YankId,
}
impl YankCommand {
    /// Sends a `PUT` (yank) or `DELETE` (unyank) request to the registry API
    /// of the resolved index for one version/target of a package.
    ///
    /// # Errors
    /// Fails when the index cannot be resolved or refreshed, or when the HTTP
    /// request fails (non-success statuses other than 409/403 also error).
    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
        let YankId(package, version, target) = self.package;

        // Resolve which index to talk to (CLI flag, manifest, or config default).
        let index_url = get_index(&project, self.index.as_deref()).await?;
        let source = PesdePackageSource::new(index_url.clone());
        source
            .refresh(&RefreshOptions {
                project: project.clone(),
            })
            .await
            .context("failed to refresh source")?;
        let config = source
            .config(&project)
            .await
            .context("failed to get index config")?;

        let mut request = reqwest.request(
            // DELETE undoes a yank; PUT sets one.
            if self.undo {
                Method::DELETE
            } else {
                Method::PUT
            },
            // Each path segment is percent-encoded; `target` is either a
            // specific kind or the literal `all`.
            format!(
                "{}/v0/packages/{}/{}/{}/yank",
                config.api(),
                urlencoding::encode(&package.to_string()),
                urlencoding::encode(&version.to_string()),
                urlencoding::encode(&target.to_string()),
            ),
        );

        // Authenticate with the token stored for this index, if any.
        if let Some(token) = project.auth_config().tokens().get(&index_url) {
            tracing::debug!("using token for {index_url}");
            request = request.header(AUTHORIZATION, token);
        }

        let response = request.send().await.context("failed to send request")?;
        let status = response.status();
        let text = response
            .text()
            .await
            .context("failed to get response text")?;
        // Message prefix: "yank" vs "unyank" depending on the operation.
        let prefix = if self.undo { "un" } else { "" };
        match status {
            StatusCode::CONFLICT => {
                println!(
                    "{}",
                    format!("version is already {prefix}yanked").red().bold()
                );
            }
            StatusCode::FORBIDDEN => {
                println!(
                    "{}",
                    format!("unauthorized to {prefix}yank under this scope")
                        .red()
                        .bold()
                );
            }
            code if !code.is_success() => {
                anyhow::bail!("failed to {prefix}yank package: {code} ({text})");
            }
            _ => {
                println!("{text}");
            }
        }

        Ok(())
    }
}

View file

@ -1,8 +1,10 @@
use crate::cli::config::read_config;
use anyhow::Context; use anyhow::Context;
use colored::Colorize; use colored::Colorize;
use fs_err::tokio as fs; use fs_err::tokio as fs;
use futures::StreamExt; use futures::StreamExt;
use pesde::{ use pesde::{
errors::ManifestReadError,
lockfile::Lockfile, lockfile::Lockfile,
manifest::{ manifest::{
overrides::{OverrideKey, OverrideSpecifier}, overrides::{OverrideKey, OverrideSpecifier},
@ -13,7 +15,7 @@ use pesde::{
source::{ source::{
ids::VersionId, specifiers::DependencySpecifiers, workspace::specifier::VersionTypeOrReq, ids::VersionId, specifiers::DependencySpecifiers, workspace::specifier::VersionTypeOrReq,
}, },
Project, Project, DEFAULT_INDEX_NAME,
}; };
use relative_path::RelativePathBuf; use relative_path::RelativePathBuf;
use std::{ use std::{
@ -310,3 +312,37 @@ pub fn display_err(result: anyhow::Result<()>, prefix: &str) {
} }
} }
} }
pub async fn get_index(project: &Project, index: Option<&str>) -> anyhow::Result<gix::Url> {
let manifest = match project.deser_manifest().await {
Ok(manifest) => Some(manifest),
Err(e) => match e {
ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None,
e => return Err(e.into()),
},
};
let index_url = match index {
Some(index) => match index.try_into() {
Ok(url) => Some(url),
Err(_) => None,
},
None => match manifest {
Some(_) => None,
None => Some(read_config().await?.default_index),
},
};
match index_url {
Some(url) => Ok(url),
None => {
let index_name = index.unwrap_or(DEFAULT_INDEX_NAME);
manifest
.unwrap()
.indices
.remove(index_name)
.with_context(|| format!("index {index_name} not found in manifest"))
}
}
}

View file

@ -97,6 +97,16 @@ impl PackageName {
pub fn escaped(&self) -> String { pub fn escaped(&self) -> String {
format!("{}+{}", self.0, self.1) format!("{}+{}", self.0, self.1)
} }
    /// Returns the scope of the package name
    // borrows the first tuple field; no allocation
    pub fn scope(&self) -> &str {
        &self.0
    }
    /// Returns the name of the package name
    // borrows the second tuple field; no allocation
    pub fn name(&self) -> &str {
        &self.1
    }
} }
/// All possible package names /// All possible package names

View file

@ -140,17 +140,23 @@ impl PackageSource for PesdePackageSource {
.. ..
} = options; } = options;
let Some(IndexFile { entries, .. }) = self.read_index_file(&specifier.name, project)? let Some(IndexFile { meta, entries, .. }) =
self.read_index_file(&specifier.name, project)?
else { else {
return Err(errors::ResolveError::NotFound(specifier.name.to_string())); return Err(errors::ResolveError::NotFound(specifier.name.to_string()));
}; };
if !meta.deprecated.is_empty() {
tracing::warn!("{} is deprecated: {}", specifier.name, meta.deprecated);
}
tracing::debug!("{} has {} possible entries", specifier.name, entries.len()); tracing::debug!("{} has {} possible entries", specifier.name, entries.len());
Ok(( Ok((
PackageNames::Pesde(specifier.name.clone()), PackageNames::Pesde(specifier.name.clone()),
entries entries
.into_iter() .into_iter()
.filter(|(_, entry)| !entry.yanked)
.filter(|(VersionId(version, target), _)| { .filter(|(VersionId(version, target), _)| {
specifier.version.matches(version) specifier.version.matches(version)
&& specifier.target.unwrap_or(*project_target) == *target && specifier.target.unwrap_or(*project_target) == *target
@ -484,6 +490,10 @@ pub struct IndexFileEntry {
#[serde(default, skip_serializing_if = "BTreeSet::is_empty")] #[serde(default, skip_serializing_if = "BTreeSet::is_empty")]
pub docs: BTreeSet<DocEntry>, pub docs: BTreeSet<DocEntry>,
/// Whether this version is yanked
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
pub yanked: bool,
/// The dependencies of this package /// The dependencies of this package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")] #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>, pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
@ -491,10 +501,14 @@ pub struct IndexFileEntry {
/// The package metadata in the index file /// The package metadata in the index file
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
pub struct IndexMetadata {} pub struct IndexMetadata {
/// Whether this package is deprecated
#[serde(default, skip_serializing_if = "String::is_empty")]
pub deprecated: String,
}
/// The index file for a package /// The index file for a package
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Default)]
pub struct IndexFile { pub struct IndexFile {
/// Any package-wide metadata /// Any package-wide metadata
#[serde(default, skip_serializing_if = "crate::util::is_default")] #[serde(default, skip_serializing_if = "crate::util::is_default")]

View file

@ -5,32 +5,57 @@ export type SearchResponse = {
data: PackageResponse[] data: PackageResponse[]
} }
export type PackageVersionsResponse = PackageResponse[] export type PackageVersionsResponse = {
name: string
deprecated?: string
versions: Record<
string,
{
description?: string
targets: Record<
TargetKind,
{ target: TargetInfoInner; yanked?: boolean } & PackageResponseInner
>
}
>
}
export type PackageVersionResponse = PackageResponse export type PackageVersionResponse = PackageResponse
export type PackageResponseInner = {
published_at: string
license?: string
authors?: string[]
repository?: string
docs?: DocEntry[]
dependencies?: Record<string, DependencyEntry>
}
export type PackageResponse = { export type PackageResponse = {
name: string name: string
version: string version: string
targets: TargetInfo[] targets: TargetInfo[]
description: string description?: string
published_at: string deprecated?: string
license?: string } & PackageResponseInner
authors?: string[]
repository?: string
dependencies: Record<string, DependencyEntry>
docs?: DocEntry[]
}
export type TargetInfo = { export type TargetInfoInner = {
kind: TargetKind
lib: boolean lib: boolean
bin: boolean bin: boolean
scripts?: string[] scripts?: string[]
} }
export type TargetInfo = {
yanked?: boolean
kind: TargetKind
} & TargetInfoInner
export type TargetKind = "roblox" | "roblox_server" | "lune" | "luau" export type TargetKind = "roblox" | "roblox_server" | "lune" | "luau"
// Type guard narrowing an arbitrary string (e.g. a route parameter) to a
// known TargetKind.
export const isTargetKind = (value: string | undefined): value is TargetKind => {
	const kinds = ["roblox", "roblox_server", "lune", "luau"]
	return value !== undefined && kinds.includes(value)
}
export type DependencyEntry = [DependencyInfo, DependencyKind] export type DependencyEntry = [DependencyInfo, DependencyKind]
export type DependencyInfo = export type DependencyInfo =
@ -62,7 +87,6 @@ export type DocEntryCategory = DocEntryBase & {
export type DocEntryPage = DocEntryBase & { export type DocEntryPage = DocEntryBase & {
name: string name: string
hash: string
} }
export const TARGET_KIND_DISPLAY_NAMES: Record<TargetKind, string> = { export const TARGET_KIND_DISPLAY_NAMES: Record<TargetKind, string> = {

View file

@ -73,6 +73,15 @@
</time> </time>
</div> </div>
<p class="mb-6 max-w-prose">{pkgDescription}</p> <p class="mb-6 max-w-prose">{pkgDescription}</p>
{#if data.pkg.deprecated}
<div class="admonition admonition-danger !mb-8">
<p class="admonition-title">
<span class="admonition-icon"></span>
<span class="admonition-label">Deprecated</span>
</p>
<p>{data.pkg.deprecated}</p>
</div>
{/if}
<div class="mb-8 lg:hidden"> <div class="mb-8 lg:hidden">
<TargetSelector /> <TargetSelector />

View file

@ -5,6 +5,7 @@
import { TARGET_KIND_DISPLAY_NAMES, type TargetInfo, type TargetKind } from "$lib/registry-api" import { TARGET_KIND_DISPLAY_NAMES, type TargetInfo, type TargetKind } from "$lib/registry-api"
import { Label, useId } from "bits-ui" import { Label, useId } from "bits-ui"
import { getContext } from "svelte" import { getContext } from "svelte"
import { TriangleAlert } from "lucide-svelte"
const currentTarget = getContext<{ value: TargetInfo }>("currentTarget") const currentTarget = getContext<{ value: TargetInfo }>("currentTarget")
@ -32,6 +33,14 @@
<div class="text-heading mb-1 text-lg font-semibold"> <div class="text-heading mb-1 text-lg font-semibold">
<Label.Root for={id} onclick={() => (open = true)}>Target</Label.Root> <Label.Root for={id} onclick={() => (open = true)}>Target</Label.Root>
{#if currentTarget.value.yanked}
<span
class="ml-1 inline-flex items-center rounded bg-yellow-600/10 px-2 py-1 text-sm text-yellow-950 dark:bg-yellow-500/10 dark:text-yellow-100"
>
<TriangleAlert class="mr-1 inline-block size-4" />
<span class="-mb-0.5">Yanked</span>
</span>
{/if}
</div> </div>
<Select <Select

View file

@ -1,8 +1,10 @@
import { import {
fetchRegistryJson, fetchRegistryJson,
isTargetKind,
RegistryHttpError, RegistryHttpError,
type PackageVersionResponse, type PackageVersionResponse,
type PackageVersionsResponse, type PackageVersionsResponse,
type TargetKind,
} from "$lib/registry-api" } from "$lib/registry-api"
import { error, redirect } from "@sveltejs/kit" import { error, redirect } from "@sveltejs/kit"
import type { LayoutLoad } from "./$types" import type { LayoutLoad } from "./$types"
@ -11,27 +13,30 @@ type FetchPackageOptions = {
scope: string scope: string
name: string name: string
version: string version: string
target: string target: TargetKind
} }
const fetchPackageAndVersions = async (fetcher: typeof fetch, options: FetchPackageOptions) => { const fetchPackageAndVersions = async (fetcher: typeof fetch, options: FetchPackageOptions) => {
const { scope, name, version, target } = options const { scope, name, version, target } = options
try { try {
const [pkg, versions] = await Promise.all([ const versionsResponse = await fetchRegistryJson<PackageVersionsResponse>(
fetchRegistryJson<PackageVersionResponse>( `packages/${encodeURIComponent(`${scope}/${name}`)}`,
`packages/${encodeURIComponent(`${scope}/${name}`)}/${version}/${target}`, fetcher,
fetcher, )
),
fetchRegistryJson<PackageVersionsResponse>( const versions = Object.keys(versionsResponse.versions).reverse()
`packages/${encodeURIComponent(`${scope}/${name}`)}`,
fetcher,
),
])
versions.reverse() return {
return { pkg, versions } pkg: {
name: versionsResponse.name,
version,
targets: versionsResponse.versions[version].targets,
description: versionsResponse.versions[version].description,
...versionsResponse.versions[version].targets[target],
},
versions,
}
} catch (e) { } catch (e) {
if (e instanceof RegistryHttpError && e.response.status === 404) { if (e instanceof RegistryHttpError && e.response.status === 404) {
error(404, "This package does not exist.") error(404, "This package does not exist.")
@ -47,7 +52,7 @@ export const load: LayoutLoad = async ({ params, url, fetch }) => {
error(404, "Not Found") error(404, "Not Found")
} }
if (version === undefined || target === undefined || version === "latest" || target === "any") { if (version === undefined || version === "latest" || !isTargetKind(target)) {
const pkg = await fetchRegistryJson<PackageVersionResponse>( const pkg = await fetchRegistryJson<PackageVersionResponse>(
`packages/${encodeURIComponent(`${scope}/${name}`)}/${version ?? "latest"}/${target ?? "any"}`, `packages/${encodeURIComponent(`${scope}/${name}`)}/${version ?? "latest"}/${target ?? "any"}`,
fetch, fetch,
@ -62,6 +67,6 @@ export const load: LayoutLoad = async ({ params, url, fetch }) => {
return { return {
pkg, pkg,
versions: versions.map((v) => v.version), versions,
} }
} }

View file

@ -2,7 +2,7 @@
import { goto } from "$app/navigation" import { goto } from "$app/navigation"
import { page } from "$app/stores" import { page } from "$app/stores"
import Select from "$lib/components/Select.svelte" import Select from "$lib/components/Select.svelte"
import { TARGET_KIND_DISPLAY_NAMES, type TargetInfo } from "$lib/registry-api" import { TARGET_KIND_DISPLAY_NAMES, type TargetKind } from "$lib/registry-api"
import type { Snippet } from "svelte" import type { Snippet } from "svelte"
let disabled = $state(false) let disabled = $state(false)
@ -23,9 +23,9 @@
</script> </script>
<Select <Select
items={$page.data.pkg.targets.map((target: TargetInfo) => ({ items={Object.keys($page.data.pkg.targets).map((target) => ({
value: target.kind, value: target,
label: TARGET_KIND_DISPLAY_NAMES[target.kind], label: TARGET_KIND_DISPLAY_NAMES[target as TargetKind],
}))} }))}
value={$page.params.target ?? $page.data.pkg.targets[0].kind} value={$page.params.target ?? $page.data.pkg.targets[0].kind}
contentClass={sameWidth ? "" : "w-32"} contentClass={sameWidth ? "" : "w-32"}

View file

@ -4,26 +4,12 @@
const { data } = $props() const { data } = $props()
// Vercel only supports up to Node 20.x, which doesn't support Object.groupBy
function groupBy<T, K extends PropertyKey>(
arr: T[],
predicate: (value: T) => K,
): Partial<Record<K, T[]>> {
const groups: Partial<Record<K, T[]>> = {}
for (const item of arr) {
const key = predicate(item)
if (key in groups) {
groups[key]!.push(item)
} else {
groups[key] = [item]
}
}
return groups
}
let groupedDeps = $derived( let groupedDeps = $derived(
groupBy( Object.groupBy(
Object.entries(data.pkg.dependencies).map(([alias, dependency]) => ({ alias, dependency })), Object.entries(data.pkg.dependencies ?? {}).map(([alias, dependency]) => ({
alias,
dependency,
})),
(entry) => entry.dependency[1], (entry) => entry.dependency[1],
), ),
) )

View file

@ -1,5 +1,5 @@
<script lang="ts"> <script lang="ts">
import { TARGET_KIND_DISPLAY_NAMES, type TargetInfo } from "$lib/registry-api.js" import { TARGET_KIND_DISPLAY_NAMES, type TargetKind } from "$lib/registry-api.js"
import { formatDistanceToNow } from "date-fns" import { formatDistanceToNow } from "date-fns"
const { data } = $props() const { data } = $props()
@ -11,35 +11,43 @@
</script> </script>
<div class="space-y-4 py-4"> <div class="space-y-4 py-4">
{#each data.versions as pkg, index} {#each data.versions as pkgVersion, index}
{@const isLatest = index === 0} {@const isLatest = index === 0}
<article <article
class={`bg-card hover:bg-card-hover relative overflow-hidden rounded px-5 py-4 transition ${ class={`bg-card hover:bg-card-hover relative overflow-hidden rounded px-5 py-4 transition ${
isLatest ? "ring-primary ring-2 ring-inset" : "" isLatest ? "ring-primary ring-2 ring-inset" : ""
}`} }`}
class:opacity-50={Object.values(pkgVersion.targets).every(({ yanked }) => yanked)}
> >
<h2 class="text-heading font-semibold"> <h2 class="text-heading font-semibold">
<a <a
href={`/packages/${pkg.name}/${pkg.version}/any`} href={`/packages/${data.name}/${pkgVersion.version}/any`}
class="after:absolute after:inset-0 after:content-['']" class="after:absolute after:inset-0 after:content-['']"
> >
{pkg.version} {pkgVersion.version}
{#if isLatest} {#if isLatest}
<span class="text-primary">(latest)</span> <span class="text-primary">(latest)</span>
{/if} {/if}
</a> </a>
</h2> </h2>
<div class="text-sm font-semibold" class:invisible={!displayDates}> <div class="text-sm font-semibold" class:invisible={!displayDates}>
<time datetime={pkg.published_at}> <time datetime={pkgVersion.published_at}>
{#if displayDates} {#if displayDates}
{formatDistanceToNow(new Date(pkg.published_at), { addSuffix: true })} {formatDistanceToNow(new Date(pkgVersion.published_at), { addSuffix: true })}
{:else} {:else}
... ...
{/if} {/if}
</time> </time>
· ·
{pkg.targets.map((target: TargetInfo) => TARGET_KIND_DISPLAY_NAMES[target.kind]).join(", ")} {#each Object.entries(pkgVersion.targets) as [target, info], index}
{#if index > 0}
<span>, </span>
{/if}
<span class:line-through={info.yanked}
>{TARGET_KIND_DISPLAY_NAMES[target as TargetKind]}</span
>
{/each}
</div> </div>
</article> </article>
{/each} {/each}

View file

@ -10,18 +10,30 @@ export const load: PageLoad = async ({ params, fetch }) => {
const { scope, name } = params const { scope, name } = params
try { try {
const versions = await fetchRegistryJson<PackageVersionsResponse>( const versionsResponse = await fetchRegistryJson<PackageVersionsResponse>(
`packages/${encodeURIComponent(`${scope}/${name}`)}`, `packages/${encodeURIComponent(`${scope}/${name}`)}`,
fetch, fetch,
) )
versions.reverse() const versions = Object.entries(versionsResponse.versions)
.map(([version, data]) => ({
version,
description: data.description,
targets: data.targets,
published_at: Object.values(data.targets)
.map(({ published_at }) => new Date(published_at))
.sort()
.reverse()[0]
.toISOString(),
}))
.reverse()
return { return {
name: versionsResponse.name,
versions, versions,
meta: { meta: {
title: `${versions[0].name} - versions`, title: `${versionsResponse.name} - versions`,
description: versions[0].description, description: versions[0].description,
}, },
} }