feat(registry): return more info in error responses

This commit is contained in:
daimond113 2024-11-12 17:58:20 +01:00
parent 1be3bf505e
commit 9f93cb93d6
No known key found for this signature in database
GPG key ID: 3A8ECE51328B513C
2 changed files with 77 additions and 29 deletions

View file

@ -66,6 +66,25 @@ struct DocEntryInfo {
collapsed: bool, collapsed: bool,
} }
/// Returns `true` when two repository URLs refer to the same repository,
/// comparing case-insensitively and ignoring an optional trailing `.git`
/// suffix (so `https://HOST/Owner/Repo.git` matches `https://host/owner/repo`).
///
/// NOTE(review): the whole rendered URL is lowercased, including the path —
/// this assumes the hosting service treats repository paths case-insensitively
/// (true for GitHub/GitLab redirects); confirm for other hosts.
fn compare_repo_urls(this: &gix::Url, external: &gix::Url) -> bool {
	// Render the URL to text, lowercase it, and drop a single trailing
	// `.git` so the two common spellings of the same repo compare equal.
	fn normalize(url: &gix::Url) -> String {
		let mut s = url.to_bstring().to_string().to_lowercase();
		if s.ends_with(".git") {
			// `truncate` avoids the manual `&s[..len - 4]` re-slice.
			s.truncate(s.len() - ".git".len());
		}
		s
	}

	normalize(this) == normalize(external)
}
pub async fn publish_package( pub async fn publish_package(
app_state: web::Data<AppState>, app_state: web::Data<AppState>,
bytes: Bytes, bytes: Bytes,
@ -94,12 +113,14 @@ pub async fn publish_package(
let file_name = entry let file_name = entry
.file_name() .file_name()
.to_str() .to_str()
.ok_or(Error::InvalidArchive)? .ok_or_else(|| Error::InvalidArchive("file name contains non UTF-8 characters".into()))?
.to_string(); .to_string();
if entry.file_type().await?.is_dir() { if entry.file_type().await?.is_dir() {
if IGNORED_DIRS.contains(&file_name.as_str()) { if IGNORED_DIRS.contains(&file_name.as_str()) {
return Err(Error::InvalidArchive); return Err(Error::InvalidArchive(format!(
"archive contains forbidden directory: {file_name}"
)));
} }
if file_name == "docs" { if file_name == "docs" {
@ -114,7 +135,11 @@ pub async fn publish_package(
let file_name = entry let file_name = entry
.file_name() .file_name()
.to_str() .to_str()
.ok_or(Error::InvalidArchive)? .ok_or_else(|| {
Error::InvalidArchive(
"file name contains non UTF-8 characters".into(),
)
})?
.to_string(); .to_string();
if entry.file_type().await?.is_dir() { if entry.file_type().await?.is_dir() {
@ -176,8 +201,12 @@ pub async fn publish_package(
.and_then(|l| l.strip_prefix("# ")) .and_then(|l| l.strip_prefix("# "))
.map(|s| s.to_string()); .map(|s| s.to_string());
let info: DocEntryInfo = serde_yaml::from_str(&front_matter) let info: DocEntryInfo =
.map_err(|_| Error::InvalidArchive)?; serde_yaml::from_str(&front_matter).map_err(|_| {
Error::InvalidArchive(format!(
"doc {file_name}'s frontmatter isn't valid YAML"
))
})?;
set.insert(DocEntry { set.insert(DocEntry {
label: info.label.or(h1).unwrap_or(file_name.to_case(Case::Title)), label: info.label.or(h1).unwrap_or(file_name.to_case(Case::Title)),
@ -189,7 +218,11 @@ pub async fn publish_package(
.unwrap() .unwrap()
.with_extension("") .with_extension("")
.to_str() .to_str()
.ok_or(Error::InvalidArchive)? .ok_or_else(|| {
Error::InvalidArchive(
"file name contains non UTF-8 characters".into(),
)
})?
// ensure that the path is always using forward slashes // ensure that the path is always using forward slashes
.replace("\\", "/"), .replace("\\", "/"),
hash, hash,
@ -223,12 +256,12 @@ pub async fn publish_package(
continue; continue;
} }
if IGNORED_FILES.contains(&file_name.as_str()) { if IGNORED_FILES.contains(&file_name.as_str())
return Err(Error::InvalidArchive); || ADDITIONAL_FORBIDDEN_FILES.contains(&file_name.as_str())
} {
return Err(Error::InvalidArchive(format!(
if ADDITIONAL_FORBIDDEN_FILES.contains(&file_name.as_str()) { "archive contains forbidden file: {file_name}"
return Err(Error::InvalidArchive); )));
} }
if file_name == MANIFEST_FILE_NAME { if file_name == MANIFEST_FILE_NAME {
@ -242,7 +275,9 @@ pub async fn publish_package(
.is_some() .is_some()
{ {
if readme.is_some() { if readme.is_some() {
return Err(Error::InvalidArchive); return Err(Error::InvalidArchive(
"archive contains multiple readme files".into(),
));
} }
let mut file = fs::File::open(entry.path()).await?; let mut file = fs::File::open(entry.path()).await?;
@ -255,7 +290,9 @@ pub async fn publish_package(
} }
let Some(manifest) = manifest else { let Some(manifest) = manifest else {
return Err(Error::InvalidArchive); return Err(Error::InvalidArchive(
"archive doesn't contain a manifest".into(),
));
}; };
add_breadcrumb(sentry::Breadcrumb { add_breadcrumb(sentry::Breadcrumb {
@ -273,9 +310,9 @@ pub async fn publish_package(
}); });
{ {
let dependencies = manifest let dependencies = manifest.all_dependencies().map_err(|e| {
.all_dependencies() Error::InvalidArchive(format!("manifest has invalid dependencies: {e}"))
.map_err(|_| Error::InvalidArchive)?; })?;
for (specifier, _) in dependencies.values() { for (specifier, _) in dependencies.values() {
match specifier { match specifier {
@ -285,17 +322,21 @@ pub async fn publish_package(
.as_deref() .as_deref()
.filter(|index| match gix::Url::try_from(*index) { .filter(|index| match gix::Url::try_from(*index) {
Ok(_) if config.other_registries_allowed => true, Ok(_) if config.other_registries_allowed => true,
Ok(url) => url == *source.repo_url(), Ok(url) => compare_repo_urls(source.repo_url(), &url),
Err(_) => false, Err(_) => false,
}) })
.is_none() .is_none()
{ {
return Err(Error::InvalidArchive); return Err(Error::InvalidArchive(format!(
"invalid index in pesde dependency {specifier}"
)));
} }
} }
DependencySpecifiers::Wally(specifier) => { DependencySpecifiers::Wally(specifier) => {
if !config.wally_allowed { if !config.wally_allowed {
return Err(Error::InvalidArchive); return Err(Error::InvalidArchive(
"wally dependencies are not allowed".into(),
));
} }
if specifier if specifier
@ -304,17 +345,23 @@ pub async fn publish_package(
.filter(|index| index.parse::<url::Url>().is_ok()) .filter(|index| index.parse::<url::Url>().is_ok())
.is_none() .is_none()
{ {
return Err(Error::InvalidArchive); return Err(Error::InvalidArchive(format!(
"invalid index in wally dependency {specifier}"
)));
} }
} }
DependencySpecifiers::Git(_) => { DependencySpecifiers::Git(_) => {
if !config.git_allowed { if !config.git_allowed {
return Err(Error::InvalidArchive); return Err(Error::InvalidArchive(
"git dependencies are not allowed".into(),
));
} }
} }
DependencySpecifiers::Workspace(_) => { DependencySpecifiers::Workspace(_) => {
// workspace specifiers are to be transformed into Pesde specifiers by the sender // workspace specifiers are to be transformed into Pesde specifiers by the sender
return Err(Error::InvalidArchive); return Err(Error::InvalidArchive(
"non-transformed workspace dependency".into(),
));
} }
} }
} }

View file

@ -25,7 +25,7 @@ pub enum Error {
Tar(#[from] std::io::Error), Tar(#[from] std::io::Error),
#[error("invalid archive")] #[error("invalid archive")]
InvalidArchive, InvalidArchive(String),
#[error("failed to read index config")] #[error("failed to read index config")]
Config(#[from] pesde::source::pesde::errors::ConfigError), Config(#[from] pesde::source::pesde::errors::ConfigError),
@ -60,11 +60,12 @@ impl ResponseError for Error {
Error::Query(e) => HttpResponse::BadRequest().json(ErrorResponse { Error::Query(e) => HttpResponse::BadRequest().json(ErrorResponse {
error: format!("failed to parse query: {e}"), error: format!("failed to parse query: {e}"),
}), }),
Error::Tar(_) | Error::InvalidArchive => { Error::Tar(_) => HttpResponse::BadRequest().json(ErrorResponse {
HttpResponse::BadRequest().json(ErrorResponse { error: "corrupt archive".to_string(),
error: "invalid archive".to_string(), }),
}) Error::InvalidArchive(e) => HttpResponse::BadRequest().json(ErrorResponse {
} error: format!("archive is invalid: {e}"),
}),
e => { e => {
log::error!("unhandled error: {e:?}"); log::error!("unhandled error: {e:?}");
HttpResponse::InternalServerError().finish() HttpResponse::InternalServerError().finish()