From ff5c2e5d61d7681edd7947ec7d527c40b78428a7 Mon Sep 17 00:00:00 2001
From: daimond113
Date: Sat, 8 Mar 2025 22:00:52 +0100
Subject: [PATCH] refactor: specify many new clippy lints

Adds quite a lot of Clippy lints that fit my personal taste for how
pesde's codebase should look. Stylistic lints are mostly set to warn,
and behavioural lints are mostly set to deny.
---
 Cargo.toml | 144 ++++++++++++++++++++++
 clippy.toml | 1 +
 registry/src/auth/github.rs | 7 +-
 registry/src/auth/mod.rs | 34 +++--
 registry/src/auth/rw_token.rs | 24 ++--
 registry/src/auth/token.rs | 13 +-
 registry/src/endpoints/package_archive.rs | 4 +-
 registry/src/endpoints/package_doc.rs | 10 +-
 registry/src/endpoints/package_readme.rs | 4 +-
 registry/src/endpoints/package_version.rs | 6 +-
 registry/src/endpoints/publish_version.rs | 38 +++---
 registry/src/endpoints/search.rs | 6 +-
 registry/src/git.rs | 6 +-
 registry/src/main.rs | 5 +-
 registry/src/package.rs | 6 +-
 registry/src/request_path.rs | 20 +--
 registry/src/search.rs | 108 ++++++++++------
 registry/src/storage/s3.rs | 4 +-
 src/cli/auth.rs | 4 +-
 src/cli/commands/add.rs | 13 +-
 src/cli/commands/auth/login.rs | 7 +-
 src/cli/commands/auth/logout.rs | 2 +-
 src/cli/commands/auth/token.rs | 11 +-
 src/cli/commands/auth/whoami.rs | 11 +-
 src/cli/commands/cas/prune.rs | 10 +-
 src/cli/commands/deprecate.rs | 4 +-
 src/cli/commands/execute.rs | 8 +-
 src/cli/commands/init.rs | 18 +--
 src/cli/commands/list.rs | 4 +-
 src/cli/commands/outdated.rs | 6 +-
 src/cli/commands/patch.rs | 8 +-
 src/cli/commands/patch_commit.rs | 4 +-
 src/cli/commands/publish.rs | 69 ++++++-----
 src/cli/commands/remove.rs | 4 +-
 src/cli/commands/run.rs | 22 ++--
 src/cli/commands/self_install.rs | 11 +-
 src/cli/commands/self_upgrade.rs | 2 +-
 src/cli/commands/yank.rs | 4 +-
 src/cli/config.rs | 2 +-
 src/cli/install.rs | 14 +--
 src/cli/mod.rs | 68 +++++-----
 src/cli/reporters.rs | 2 +-
 src/cli/version.rs | 33 +++--
 src/download.rs | 8 +-
 src/download_and_link.rs | 19 ++-
 src/engine/mod.rs | 3 +-
 src/engine/source/archive.rs | 24 ++--
 src/engine/source/mod.rs | 2 +
 src/graph.rs | 4 +-
 src/lib.rs | 28 +++--
 src/linking/generator.rs | 23 ++--
 src/linking/incremental.rs | 14 +--
 src/linking/mod.rs | 12 +-
 src/lockfile.rs | 2 +
 src/main.rs | 19 +--
 src/manifest/mod.rs | 3 +-
 src/manifest/target.rs | 9 +-
 src/names.rs | 15 ++-
 src/patches.rs | 6 +-
 src/reporters.rs | 4 +-
 src/resolver.rs | 14 +--
 src/scripts.rs | 4 +-
 src/source/fs.rs | 6 +-
 src/source/git/mod.rs | 23 ++--
 src/source/git_index.rs | 7 +-
 src/source/ids.rs | 9 ++
 src/source/path/mod.rs | 4 +-
 src/source/pesde/mod.rs | 7 +-
 src/source/refs.rs | 1 +
 src/source/wally/mod.rs | 12 +-
 src/source/workspace/mod.rs | 6 +-
 src/util.rs | 19 +--
 72 files changed, 657 insertions(+), 431 deletions(-)
 create mode 100644 clippy.toml

diff --git a/Cargo.toml b/Cargo.toml
index fe42c81..23e38b9 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -42,7 +42,151 @@ path = "src/main.rs"
 required-features = ["bin"]
 
 [workspace.lints.clippy]
+zero_sized_map_values = "warn"
+while_float = "deny"
+useless_let_if_seq = "warn"
+unused_trait_names = "warn"
+unused_result_ok = "warn"
+unused_peekable = "warn"
+unused_async = "warn"
+unreadable_literal = "warn"
+unnested_or_patterns = "warn"
+unneeded_field_pattern = "warn"
+unnecessary_wraps = "warn"
+unnecessary_semicolon = "warn"
+unnecessary_self_imports = "warn"
+unnecessary_literal_bound = "warn"
+unnecessary_join = "warn"
+unnecessary_box_returns = "warn"
 uninlined_format_args = "warn"
+type_repetition_in_bounds = "warn"
+try_err = 
"warn" +trivially_copy_pass_by_ref = "warn" +trait_duplication_in_bounds = "warn" +todo = "deny" +suspicious_operation_groupings = "warn" +suboptimal_flops = "deny" +struct_field_names = "warn" +string_to_string = "warn" +string_lit_chars_any = "warn" +string_lit_as_bytes = "warn" +str_split_at_newline = "warn" +stable_sort_primitive = "warn" +single_option_map = "warn" +single_match_else = "warn" +single_char_pattern = "warn" +significant_drop_tightening = "warn" +significant_drop_in_scrutinee = "warn" +set_contains_or_insert = "deny" +separated_literal_suffix = "warn" +semicolon_inside_block = "warn" +semicolon_if_nothing_returned = "warn" +self_named_module_files = "warn" +same_functions_in_if_condition = "warn" +return_and_then = "warn" +renamed_function_params = "warn" +ref_patterns = "deny" +ref_option = "deny" +ref_binding_to_reference = "deny" +redundant_type_annotations = "deny" +redundant_else = "warn" +redundant_closure_for_method_calls = "warn" +redundant_clone = "deny" +read_zero_byte_vec = "warn" +rc_buffer = "deny" +range_plus_one = "deny" +range_minus_one = "deny" +pub_without_shorthand = "deny" +pub_underscore_fields = "deny" +precedence_bits = "deny" +pathbuf_init_then_push = "warn" +path_buf_push_overwrite = "warn" +option_option = "deny" +option_as_ref_cloned = "deny" +nonstandard_macro_braces = "deny" +non_zero_suggestions = "deny" +no_effect_underscore_binding = "warn" +needless_raw_string_hashes = "warn" +needless_pass_by_value = "deny" +needless_pass_by_ref_mut = "warn" +needless_for_each = "deny" +needless_continue = "deny" +needless_collect = "deny" +needless_bitwise_bool = "deny" +mut_mut = "deny" +must_use_candidate = "warn" +mem_forget = "deny" +maybe_infinite_iter = "deny" +match_wildcard_for_single_variants = "deny" +match_bool = "warn" +map_unwrap_or = "warn" +map_err_ignore = "warn" +manual_midpoint = "warn" +manual_let_else = "warn" +manual_is_variant_and = "warn" +manual_is_power_of_two = "warn" +lossy_float_literal = "deny" +literal_string_with_formatting_args = "warn" +large_types_passed_by_value = "warn" +large_stack_frames = "warn" +large_stack_arrays = "warn" +large_digit_groups = "deny" +iter_with_drain = "deny" +iter_on_single_items = "deny" +iter_on_empty_collections = "deny" +iter_filter_is_some = "deny" +iter_filter_is_ok = "deny" +invalid_upcast_comparisons = "deny" +integer_division = "deny" +infinite_loop = "deny" +inefficient_to_string = "warn" +index_refutable_slice = "deny" +inconsistent_struct_constructor = "warn" +imprecise_flops = "deny" +implicit_clone = "warn" +if_then_some_else_none = "warn" +if_not_else = "warn" +get_unwrap = "warn" +from_iter_instead_of_collect = "warn" +format_push_string = "warn" +format_collect = "warn" +fn_to_numeric_cast_any = "deny" +float_cmp_const = "deny" +float_cmp = "deny" +float_arithmetic = "warn" +flat_map_option = "warn" +filter_map_next = "warn" +filetype_is_file = "deny" +explicit_iter_loop = "warn" +explicit_into_iter_loop = "warn" +explicit_deref_methods = "warn" +equatable_if_let = "warn" +enum_glob_use = "warn" +empty_structs_with_brackets = "warn" +empty_enum_variants_with_brackets = "warn" +empty_drop = "warn" +elidable_lifetime_names = "warn" +doc_link_with_quotes = "warn" +doc_link_code = "warn" +doc_include_without_cfg = "warn" +disallowed_script_idents = "warn" +derive_partial_eq_without_eq = "warn" +deref_by_slicing = "warn" +default_numeric_fallback = "warn" +dbg_macro = "deny" +comparison_chain = "warn" +collection_is_never_read = "warn" +cloned_instead_of_copied = "warn" 
+clear_with_drain = "warn" +cfg_not_test = "warn" +cast_sign_loss = "deny" +cast_precision_loss = "deny" +cast_possible_wrap = "deny" +case_sensitive_file_extension_comparisons = "warn" +branches_sharing_code = "warn" +bool_to_int_with_if = "warn" +assigning_clones = "warn" +as_underscore = "warn" [lints] workspace = true diff --git a/clippy.toml b/clippy.toml new file mode 100644 index 0000000..cda8d17 --- /dev/null +++ b/clippy.toml @@ -0,0 +1 @@ +avoid-breaking-exported-api = false diff --git a/registry/src/auth/github.rs b/registry/src/auth/github.rs index 802a741..5ef79f6 100644 --- a/registry/src/auth/github.rs +++ b/registry/src/auth/github.rs @@ -1,6 +1,6 @@ use crate::{ auth::{get_token_from_req, AuthImpl, UserId}, - error::{display_error, ReqwestErrorExt}, + error::{display_error, ReqwestErrorExt as _}, }; use actix_web::{dev::ServiceRequest, Error as ActixError}; use reqwest::StatusCode; @@ -21,9 +21,8 @@ struct TokenRequestBody { impl AuthImpl for GitHubAuth { async fn for_write_request(&self, req: &ServiceRequest) -> Result, ActixError> { - let token = match get_token_from_req(req) { - Some(token) => token, - None => return Ok(None), + let Some(token) = get_token_from_req(req) else { + return Ok(None); }; let response = match self diff --git a/registry/src/auth/mod.rs b/registry/src/auth/mod.rs index ab74f4d..1367eb9 100644 --- a/registry/src/auth/mod.rs +++ b/registry/src/auth/mod.rs @@ -11,11 +11,11 @@ use actix_web::{ error::Error as ActixError, http::header::AUTHORIZATION, middleware::Next, - web, HttpMessage, HttpResponse, + web, HttpMessage as _, HttpResponse, }; use pesde::source::pesde::IndexConfig; use sentry::add_breadcrumb; -use sha2::{Digest, Sha256}; +use sha2::{Digest as _, Sha256}; use std::fmt::Display; #[derive(Debug, Copy, Clone, Hash, PartialOrd, PartialEq, Eq, Ord)] @@ -106,13 +106,10 @@ pub async fn write_mw( req: ServiceRequest, next: Next, ) -> Result, ActixError> { - let user_id = match app_state.auth.for_write_request(&req).await? { - Some(user_id) => user_id, - None => { - return Ok(req - .into_response(HttpResponse::Unauthorized().finish()) - .map_into_right_body()) - } + let Some(user_id) = app_state.auth.for_write_request(&req).await? else { + return Ok(req + .into_response(HttpResponse::Unauthorized().finish()) + .map_into_right_body()); }; add_breadcrumb(sentry::Breadcrumb { @@ -124,7 +121,9 @@ pub async fn write_mw( req.extensions_mut().insert(user_id); - next.call(req).await.map(|res| res.map_into_left_body()) + next.call(req) + .await + .map(ServiceResponse::map_into_left_body) } pub async fn read_mw( @@ -133,13 +132,10 @@ pub async fn read_mw( next: Next, ) -> Result, ActixError> { if app_state.auth.read_needs_auth() { - let user_id = match app_state.auth.for_read_request(&req).await? { - Some(user_id) => user_id, - None => { - return Ok(req - .into_response(HttpResponse::Unauthorized().finish()) - .map_into_right_body()) - } + let Some(user_id) = app_state.auth.for_read_request(&req).await? 
else { + return Ok(req + .into_response(HttpResponse::Unauthorized().finish()) + .map_into_right_body()); }; add_breadcrumb(sentry::Breadcrumb { @@ -154,7 +150,9 @@ pub async fn read_mw( req.extensions_mut().insert(None::); } - next.call(req).await.map(|res| res.map_into_left_body()) + next.call(req) + .await + .map(ServiceResponse::map_into_left_body) } pub fn get_auth_from_env(config: &IndexConfig) -> Auth { diff --git a/registry/src/auth/rw_token.rs b/registry/src/auth/rw_token.rs index 025cc96..5562c6d 100644 --- a/registry/src/auth/rw_token.rs +++ b/registry/src/auth/rw_token.rs @@ -1,7 +1,7 @@ use crate::auth::{get_token_from_req, AuthImpl, UserId}; use actix_web::{dev::ServiceRequest, Error as ActixError}; use constant_time_eq::constant_time_eq_32; -use sha2::{Digest, Sha256}; +use sha2::{Digest as _, Sha256}; use std::fmt::Display; #[derive(Debug)] @@ -12,33 +12,23 @@ pub struct RwTokenAuth { impl AuthImpl for RwTokenAuth { async fn for_write_request(&self, req: &ServiceRequest) -> Result, ActixError> { - let token = match get_token_from_req(req) { - Some(token) => token, - None => return Ok(None), + let Some(token) = get_token_from_req(req) else { + return Ok(None); }; let token: [u8; 32] = Sha256::digest(token.as_bytes()).into(); - Ok(if constant_time_eq_32(&self.write_token, &token) { - Some(UserId::DEFAULT) - } else { - None - }) + Ok(constant_time_eq_32(&self.write_token, &token).then_some(UserId::DEFAULT)) } async fn for_read_request(&self, req: &ServiceRequest) -> Result, ActixError> { - let token = match get_token_from_req(req) { - Some(token) => token, - None => return Ok(None), + let Some(token) = get_token_from_req(req) else { + return Ok(None); }; let token: [u8; 32] = Sha256::digest(token.as_bytes()).into(); - Ok(if constant_time_eq_32(&self.read_token, &token) { - Some(UserId::DEFAULT) - } else { - None - }) + Ok(constant_time_eq_32(&self.read_token, &token).then_some(UserId::DEFAULT)) } fn read_needs_auth(&self) -> bool { diff --git a/registry/src/auth/token.rs b/registry/src/auth/token.rs index e00bfde..7873603 100644 --- a/registry/src/auth/token.rs +++ b/registry/src/auth/token.rs @@ -1,7 +1,7 @@ use crate::auth::{get_token_from_req, AuthImpl, UserId}; use actix_web::{dev::ServiceRequest, Error as ActixError}; use constant_time_eq::constant_time_eq_32; -use sha2::{Digest, Sha256}; +use sha2::{Digest as _, Sha256}; use std::fmt::Display; #[derive(Debug)] @@ -12,18 +12,13 @@ pub struct TokenAuth { impl AuthImpl for TokenAuth { async fn for_write_request(&self, req: &ServiceRequest) -> Result, ActixError> { - let token = match get_token_from_req(req) { - Some(token) => token, - None => return Ok(None), + let Some(token) = get_token_from_req(req) else { + return Ok(None); }; let token: [u8; 32] = Sha256::digest(token.as_bytes()).into(); - Ok(if constant_time_eq_32(&self.token, &token) { - Some(UserId::DEFAULT) - } else { - None - }) + Ok(constant_time_eq_32(&self.token, &token).then_some(UserId::DEFAULT)) } } diff --git a/registry/src/endpoints/package_archive.rs b/registry/src/endpoints/package_archive.rs index 2554f6f..8693d48 100644 --- a/registry/src/endpoints/package_archive.rs +++ b/registry/src/endpoints/package_archive.rs @@ -4,7 +4,7 @@ use crate::{ error::RegistryError, package::read_package, request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion}, - storage::StorageImpl, + storage::StorageImpl as _, AppState, }; use pesde::names::PackageName; @@ -19,7 +19,7 @@ pub async fn get_package_archive( return 
Ok(HttpResponse::NotFound().finish()); }; - let Some(v_id) = resolve_version_and_target(&file, version, target) else { + let Some(v_id) = resolve_version_and_target(&file, version, &target) else { return Ok(HttpResponse::NotFound().finish()); }; diff --git a/registry/src/endpoints/package_doc.rs b/registry/src/endpoints/package_doc.rs index 72ba0bd..fa769a0 100644 --- a/registry/src/endpoints/package_doc.rs +++ b/registry/src/endpoints/package_doc.rs @@ -2,7 +2,7 @@ use crate::{ error::RegistryError, package::read_package, request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion}, - storage::StorageImpl, + storage::StorageImpl as _, AppState, }; use actix_web::{web, HttpResponse}; @@ -29,10 +29,10 @@ pub fn find_package_doc<'a>( match doc { DocEntryKind::Page { name, hash } if name == doc_name => return Some(hash.as_str()), DocEntryKind::Category { items, .. } => { - queue.extend(items.iter().map(|item| &item.kind)) + queue.extend(items.iter().map(|item| &item.kind)); } - _ => continue, - }; + DocEntryKind::Page { .. } => {} + } } None @@ -54,7 +54,7 @@ pub async fn get_package_doc( return Ok(HttpResponse::NotFound().finish()); }; - let Some(v_id) = resolve_version_and_target(&file, version, target) else { + let Some(v_id) = resolve_version_and_target(&file, version, &target) else { return Ok(HttpResponse::NotFound().finish()); }; diff --git a/registry/src/endpoints/package_readme.rs b/registry/src/endpoints/package_readme.rs index 29e3578..2c7e2e0 100644 --- a/registry/src/endpoints/package_readme.rs +++ b/registry/src/endpoints/package_readme.rs @@ -4,7 +4,7 @@ use crate::{ error::RegistryError, package::read_package, request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion}, - storage::StorageImpl, + storage::StorageImpl as _, AppState, }; use pesde::names::PackageName; @@ -19,7 +19,7 @@ pub async fn get_package_readme( return Ok(HttpResponse::NotFound().finish()); }; - let Some(v_id) = resolve_version_and_target(&file, version, target) else { + let Some(v_id) = resolve_version_and_target(&file, version, &target) else { return Ok(HttpResponse::NotFound().finish()); }; diff --git a/registry/src/endpoints/package_version.rs b/registry/src/endpoints/package_version.rs index c5c9c8e..fa552c4 100644 --- a/registry/src/endpoints/package_version.rs +++ b/registry/src/endpoints/package_version.rs @@ -6,7 +6,7 @@ use crate::{ error::RegistryError, package::{read_package, PackageResponse}, request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion}, - storage::StorageImpl, + storage::StorageImpl as _, AppState, }; use pesde::names::PackageName; @@ -28,7 +28,7 @@ pub async fn get_package_version_v0( return Ok(HttpResponse::NotFound().finish()); }; - let Some(v_id) = resolve_version_and_target(&file, version, target) else { + let Some(v_id) = resolve_version_and_target(&file, version, &target) else { return Ok(HttpResponse::NotFound().finish()); }; @@ -71,7 +71,7 @@ pub async fn get_package_version( return Ok(HttpResponse::NotFound().finish()); }; - let Some(v_id) = resolve_version_and_target(&file, version, target) else { + let Some(v_id) = resolve_version_and_target(&file, version, &target) else { return Ok(HttpResponse::NotFound().finish()); }; diff --git a/registry/src/endpoints/publish_version.rs b/registry/src/endpoints/publish_version.rs index 841494f..748c32f 100644 --- a/registry/src/endpoints/publish_version.rs +++ b/registry/src/endpoints/publish_version.rs @@ -4,17 +4,17 @@ use crate::{ 
git::push_changes, package::{read_package, read_scope_info}, search::update_search_version, - storage::StorageImpl, + storage::StorageImpl as _, AppState, }; use actix_web::{web, web::Bytes, HttpResponse}; use async_compression::Level; -use convert_case::{Case, Casing}; +use convert_case::{Case, Casing as _}; use fs_err::tokio as fs; use pesde::{ manifest::{DependencyType, Manifest}, source::{ - git_index::GitBasedSource, + git_index::GitBasedSource as _, ids::VersionId, pesde::{DocEntry, DocEntryKind, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE}, specifiers::DependencySpecifiers, @@ -25,13 +25,13 @@ use pesde::{ }; use sentry::add_breadcrumb; use serde::Deserialize; -use sha2::{Digest, Sha256}; +use sha2::{Digest as _, Sha256}; use std::{ collections::{BTreeSet, HashMap}, io::Cursor, }; use tokio::{ - io::{AsyncReadExt, AsyncWriteExt}, + io::{AsyncReadExt as _, AsyncWriteExt as _}, task::JoinSet, }; @@ -144,7 +144,7 @@ pub async fn publish_package( ); let mut bytes = vec![]; gz.read_to_end(&mut bytes).await?; - docs_pages.insert(hash.to_string(), bytes); + docs_pages.insert(hash.clone(), bytes); let mut lines = content.lines().peekable(); let front_matter = if lines.peek().filter(|l| **l == "---").is_some() { @@ -166,10 +166,10 @@ pub async fn publish_package( let h1 = lines .find(|l| !l.trim().is_empty()) .and_then(|l| l.strip_prefix("# ")) - .map(|s| s.to_string()); + .map(ToString::to_string); let info: DocEntryInfo = - serde_yaml::from_str(&front_matter).map_err(|_| { + serde_yaml::from_str(&front_matter).map_err(|_e| { RegistryError::InvalidArchive(format!( "doc {file_name}'s frontmatter isn't valid YAML" )) @@ -194,7 +194,7 @@ pub async fn publish_package( ) })? // ensure that the path is always using forward slashes - .replace("\\", "/"), + .replace('\\', "/"), hash, }, }); @@ -341,19 +341,16 @@ pub async fn publish_package( let mut files = HashMap::new(); let scope = read_scope_info(&app_state, manifest.name.scope(), &source).await?; - match scope { - Some(info) => { - if !info.owners.contains(&user_id.0) { - return Ok(HttpResponse::Forbidden().finish()); - } + if let Some(info) = scope { + if !info.owners.contains(&user_id.0) { + return Ok(HttpResponse::Forbidden().finish()); } - None => { - let scope_info = toml::to_string(&ScopeInfo { - owners: BTreeSet::from([user_id.0]), - })?; + } else { + let scope_info = toml::to_string(&ScopeInfo { + owners: BTreeSet::from([user_id.0]), + })?; - files.insert(SCOPE_INFO_FILE.to_string(), scope_info.into_bytes()); - } + files.insert(SCOPE_INFO_FILE.to_string(), scope_info.into_bytes()); } let mut file = read_package(&app_state, &manifest.name, &source) @@ -414,6 +411,7 @@ pub async fn publish_package( ), ) .await?; + drop(source); update_search_version(&app_state, &manifest.name, &new_entry); } diff --git a/registry/src/endpoints/search.rs b/registry/src/endpoints/search.rs index 9efcb5c..b6703d0 100644 --- a/registry/src/endpoints/search.rs +++ b/registry/src/endpoints/search.rs @@ -8,7 +8,7 @@ use actix_web::{web, HttpResponse}; use pesde::names::PackageName; use serde::Deserialize; use std::{collections::HashMap, sync::Arc}; -use tantivy::{collector::Count, query::AllQuery, schema::Value, DateTime, Order}; +use tantivy::{collector::Count, query::AllQuery, schema::Value as _, DateTime, Order}; use tokio::task::JoinSet; #[derive(Deserialize)] @@ -64,9 +64,7 @@ pub async fn search_packages( let source = source.clone(); async move { - let id = doc - .get(&id) - .unwrap() + let id = (&doc[&id]) .as_str() .unwrap() .parse::() diff --git 
a/registry/src/git.rs b/registry/src/git.rs index 253b7f6..17255d9 100644 --- a/registry/src/git.rs +++ b/registry/src/git.rs @@ -1,6 +1,6 @@ use crate::{benv, error::RegistryError, AppState}; use git2::{Remote, Repository, Signature}; -use pesde::source::{git_index::GitBasedSource, pesde::PesdePackageSource}; +use pesde::source::{git_index::GitBasedSource as _, pesde::PesdePackageSource}; use std::collections::HashMap; use tokio::task::spawn_blocking; @@ -26,8 +26,8 @@ fn get_refspec(repo: &Repository, remote: &mut Remote) -> Result reqwest::Client { reqwest::ClientBuilder::new() .user_agent(concat!( diff --git a/registry/src/package.rs b/registry/src/package.rs index 0d0b509..6a8a693 100644 --- a/registry/src/package.rs +++ b/registry/src/package.rs @@ -6,7 +6,7 @@ use pesde::{ }, names::PackageName, source::{ - git_index::{read_file, root_tree, GitBasedSource}, + git_index::{read_file, root_tree, GitBasedSource as _}, ids::VersionId, pesde::{IndexFile, IndexFileEntry, PesdePackageSource, ScopeInfo, SCOPE_INFO_FILE}, specifiers::DependencySpecifiers, @@ -155,7 +155,7 @@ pub struct PackageResponse { impl PackageResponse { pub fn new(name: &PackageName, version_id: &VersionId, file: &IndexFile) -> Self { - let entry = file.entries.get(version_id).unwrap(); + let entry = &file.entries[version_id]; PackageResponse { name: name.to_string(), @@ -201,7 +201,7 @@ impl PackageVersionsResponse { pub fn new(name: &PackageName, file: &IndexFile) -> Self { let mut versions = BTreeMap::::new(); - for (v_id, entry) in file.entries.iter() { + for (v_id, entry) in &file.entries { let versions_resp = versions.entry(v_id.version().clone()).or_default(); versions_resp.description = entry.description.clone().unwrap_or_default(); diff --git a/registry/src/request_path.rs b/registry/src/request_path.rs index f7fd72b..db59a32 100644 --- a/registry/src/request_path.rs +++ b/registry/src/request_path.rs @@ -49,16 +49,18 @@ impl<'de> Deserialize<'de> for AnyOrSpecificTarget { } } -pub fn resolve_version_and_target( - file: &IndexFile, +pub fn resolve_version_and_target<'a>( + file: &'a IndexFile, version: LatestOrSpecificVersion, - target: AnyOrSpecificTarget, -) -> Option<&VersionId> { + target: &AnyOrSpecificTarget, +) -> Option<&'a VersionId> { let version = match version { - LatestOrSpecificVersion::Latest => match file.entries.keys().map(|k| k.version()).max() { - Some(latest) => latest.clone(), - None => return None, - }, + LatestOrSpecificVersion::Latest => { + match file.entries.keys().map(VersionId::version).max() { + Some(latest) => latest.clone(), + None => return None, + } + } LatestOrSpecificVersion::Specific(version) => version, }; @@ -70,7 +72,7 @@ pub fn resolve_version_and_target( match target { AnyOrSpecificTarget::Any => versions.min_by_key(|(v_id, _)| v_id.target()), AnyOrSpecificTarget::Specific(kind) => { - versions.find(|(_, entry)| entry.target.kind() == kind) + versions.find(|(_, entry)| entry.target.kind() == *kind) } } .map(|(v_id, _)| v_id) diff --git a/registry/src/search.rs b/registry/src/search.rs index 2a4d069..6604289 100644 --- a/registry/src/search.rs +++ b/registry/src/search.rs @@ -1,15 +1,14 @@ use crate::AppState; -use async_stream::stream; -use futures::{Stream, StreamExt}; use pesde::{ names::PackageName, source::{ - git_index::{root_tree, GitBasedSource}, + git_index::{root_tree, GitBasedSource as _}, ids::VersionId, pesde::{IndexFile, IndexFileEntry, PesdePackageSource, SCOPE_INFO_FILE}, }, Project, }; +use std::collections::BTreeMap; use tantivy::{ doc, 
query::QueryParser, @@ -17,54 +16,78 @@ use tantivy::{ tokenizer::TextAnalyzer, DateTime, IndexReader, IndexWriter, Term, }; -use tokio::pin; -async fn all_packages( - source: &PesdePackageSource, - project: &Project, -) -> impl Stream { - let path = source.path(project); +type Entries = BTreeMap; - stream! { - let repo = gix::open(&path).expect("failed to open index"); - let tree = root_tree(&repo).expect("failed to get root tree"); +struct TreeIterator<'repo> { + repo: &'repo gix::Repository, + entries: Entries, + current: Option<(String, Entries)>, +} - for entry in tree.iter() { - let entry = entry.expect("failed to read entry"); - let object = entry.object().expect("failed to get object"); +fn collect_entries(tree: &gix::Tree) -> Result { + tree.iter() + .map(|res| res.map(|r| (r.filename().to_string(), r.object_id()))) + .collect() +} - // directories will be trees, and files will be blobs - if !matches!(object.kind, gix::object::Kind::Tree) { - continue; - } +impl Iterator for TreeIterator<'_> { + type Item = (PackageName, IndexFile); - let package_scope = entry.filename().to_string(); + fn next(&mut self) -> Option { + if self + .current + .as_ref() + .is_none_or(|(_, entries)| entries.is_empty()) + { + loop { + let (scope_name, scope_oid) = self.entries.pop_last()?; - for inner_entry in object.into_tree().iter() { - let inner_entry = inner_entry.expect("failed to read inner entry"); - let object = inner_entry.object().expect("failed to get object"); + let object = self + .repo + .find_object(scope_oid) + .expect("failed to get scope object"); - if !matches!(object.kind, gix::object::Kind::Blob) { + if object.kind != gix::objs::Kind::Tree { continue; } - let package_name = inner_entry.filename().to_string(); + let tree = object.into_tree(); + let mut entries = collect_entries(&tree).expect("failed to read scope entries"); - if package_name == SCOPE_INFO_FILE { + entries.remove(SCOPE_INFO_FILE); + + if entries.is_empty() { continue; } - let blob = object.into_blob(); - let string = String::from_utf8(blob.data.clone()).expect("failed to parse utf8"); - - let file: IndexFile = toml::from_str(&string).expect("failed to parse index file"); - - // if this panics, it's an issue with the index. - let name = format!("{package_scope}/{package_name}").parse().unwrap(); - - yield (name, file); + self.current = Some((scope_name, entries)); + break; } } + + let (scope_name, entries) = self.current.as_mut()?; + let (file_name, file_oid) = entries.pop_last()?; + + let object = self + .repo + .find_object(file_oid) + .expect("failed to get scope entry object"); + + if object.kind != gix::objs::Kind::Blob { + return None; + } + + let mut blob = object.into_blob(); + let string = String::from_utf8(blob.take_data()).expect("failed to parse utf8"); + + let file = toml::from_str(&string).expect("failed to parse index file"); + + Some(( + // if this panics, it's an issue with the index. 
+ format!("{scope_name}/{file_name}").parse().unwrap(), + file, + )) } } @@ -114,10 +137,17 @@ pub async fn make_search( .unwrap(); let mut search_writer = search_index.writer(50_000_000).unwrap(); - let stream = all_packages(source, project).await; - pin!(stream); + let path = source.path(project); + let repo = gix::open(path).expect("failed to open index"); + let tree = root_tree(&repo).expect("failed to get root tree"); - while let Some((pkg_name, file)) = stream.next().await { + let iter = TreeIterator { + entries: collect_entries(&tree).expect("failed to read entries"), + repo: &repo, + current: None, + }; + + for (pkg_name, file) in iter { if !file.meta.deprecated.is_empty() { continue; } @@ -163,6 +193,7 @@ pub fn update_search_version(app_state: &AppState, name: &PackageName, entry: &I )).unwrap(); search_writer.commit().unwrap(); + drop(search_writer); app_state.search_reader.reload().unwrap(); } @@ -180,6 +211,7 @@ pub fn search_version_changed(app_state: &AppState, name: &PackageName, file: &I search_writer.delete_term(Term::from_field_text(id_field, &name.to_string())); search_writer.commit().unwrap(); + drop(search_writer); app_state.search_reader.reload().unwrap(); return; diff --git a/registry/src/storage/s3.rs b/registry/src/storage/s3.rs index 93d8736..14e7ebc 100644 --- a/registry/src/storage/s3.rs +++ b/registry/src/storage/s3.rs @@ -1,5 +1,5 @@ use crate::{ - error::{RegistryError, ReqwestErrorExt}, + error::{RegistryError, ReqwestErrorExt as _}, storage::StorageImpl, }; use actix_web::{http::header::LOCATION, HttpResponse}; @@ -7,7 +7,7 @@ use pesde::{names::PackageName, source::ids::VersionId}; use reqwest::header::{CONTENT_ENCODING, CONTENT_TYPE}; use rusty_s3::{ actions::{GetObject, PutObject}, - Bucket, Credentials, S3Action, + Bucket, Credentials, S3Action as _, }; use std::{fmt::Display, time::Duration}; diff --git a/src/cli/auth.rs b/src/cli/auth.rs index ea03aa0..05294e0 100644 --- a/src/cli/auth.rs +++ b/src/cli/auth.rs @@ -1,9 +1,9 @@ use crate::cli::config::{read_config, write_config}; -use anyhow::Context; +use anyhow::Context as _; use gix::bstr::BStr; use keyring::Entry; use reqwest::header::AUTHORIZATION; -use serde::{ser::SerializeMap, Deserialize, Serialize}; +use serde::{ser::SerializeMap as _, Deserialize, Serialize}; use std::collections::BTreeMap; use tokio::task::spawn_blocking; use tracing::instrument; diff --git a/src/cli/commands/add.rs b/src/cli/commands/add.rs index 35804d2..82fe468 100644 --- a/src/cli/commands/add.rs +++ b/src/cli/commands/add.rs @@ -1,6 +1,6 @@ -use std::str::FromStr; +use std::str::FromStr as _; -use anyhow::Context; +use anyhow::Context as _; use clap::Args; use semver::VersionReq; @@ -15,7 +15,7 @@ use pesde::{ path::{specifier::PathDependencySpecifier, PathPackageSource}, pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource}, specifiers::DependencySpecifiers, - traits::{PackageSource, RefreshOptions, ResolveOptions}, + traits::{PackageSource as _, RefreshOptions, ResolveOptions}, workspace::{specifier::WorkspaceDependencySpecifier, WorkspacePackageSource}, PackageSources, }, @@ -186,8 +186,7 @@ impl AddCommand { .to_string() .split('/') .next_back() - .map(|s| s.to_string()) - .unwrap_or_else(|| url.path.to_string()), + .map_or_else(|| url.path.to_string(), ToString::to_string), AnyPackageIdentifier::Workspace(versioned) => versioned.0.name().to_string(), AnyPackageIdentifier::Path(path) => path .file_name() @@ -203,7 +202,7 @@ impl AddCommand { match specifier { DependencySpecifiers::Pesde(spec) => { - 
field["name"] = toml_edit::value(spec.name.clone().to_string()); + field["name"] = toml_edit::value(spec.name.to_string()); field["version"] = toml_edit::value(format!("^{}", version_id.version())); if version_id.target() != project_target { @@ -244,7 +243,7 @@ impl AddCommand { println!("added git {}#{} to {dependency_key}", spec.repo, spec.rev); } DependencySpecifiers::Workspace(spec) => { - field["workspace"] = toml_edit::value(spec.name.clone().to_string()); + field["workspace"] = toml_edit::value(spec.name.to_string()); if let AnyPackageIdentifier::Workspace(versioned) = self.name { if let Some(version) = versioned.1 { field["version"] = toml_edit::value(version.to_string()); diff --git a/src/cli/commands/auth/login.rs b/src/cli/commands/auth/login.rs index 429aec8..2da5487 100644 --- a/src/cli/commands/auth/login.rs +++ b/src/cli/commands/auth/login.rs @@ -1,4 +1,4 @@ -use anyhow::Context; +use anyhow::Context as _; use clap::Args; use console::style; use serde::Deserialize; @@ -13,7 +13,7 @@ use crate::cli::{ use pesde::{ source::{ pesde::PesdePackageSource, - traits::{PackageSource, RefreshOptions}, + traits::{PackageSource as _, RefreshOptions}, }, Project, }; @@ -145,12 +145,11 @@ impl LoginCommand { return Ok(access_token); } AccessTokenResponse::Error(e) => match e { - AccessTokenError::AuthorizationPending => continue, + AccessTokenError::AuthorizationPending => {} AccessTokenError::SlowDown { interval: new_interval, } => { interval = std::time::Duration::from_secs(new_interval); - continue; } AccessTokenError::ExpiredToken => { break; diff --git a/src/cli/commands/auth/logout.rs b/src/cli/commands/auth/logout.rs index 20f2c04..f529ff2 100644 --- a/src/cli/commands/auth/logout.rs +++ b/src/cli/commands/auth/logout.rs @@ -2,7 +2,7 @@ use crate::cli::auth::set_token; use clap::Args; #[derive(Debug, Args)] -pub struct LogoutCommand {} +pub struct LogoutCommand; impl LogoutCommand { pub async fn run(self, index_url: gix::Url) -> anyhow::Result<()> { diff --git a/src/cli/commands/auth/token.rs b/src/cli/commands/auth/token.rs index 5b8ca91..05129cd 100644 --- a/src/cli/commands/auth/token.rs +++ b/src/cli/commands/auth/token.rs @@ -2,17 +2,14 @@ use crate::cli::auth::get_tokens; use clap::Args; #[derive(Debug, Args)] -pub struct TokenCommand {} +pub struct TokenCommand; impl TokenCommand { pub async fn run(self, index_url: gix::Url) -> anyhow::Result<()> { let tokens = get_tokens().await?; - let token = match tokens.0.get(&index_url) { - Some(token) => token, - None => { - println!("not logged in into {index_url}"); - return Ok(()); - } + let Some(token) = tokens.0.get(&index_url) else { + println!("not logged in into {index_url}"); + return Ok(()); }; println!("token for {index_url}: \"{token}\""); diff --git a/src/cli/commands/auth/whoami.rs b/src/cli/commands/auth/whoami.rs index bc7d7cc..6c04a6b 100644 --- a/src/cli/commands/auth/whoami.rs +++ b/src/cli/commands/auth/whoami.rs @@ -3,17 +3,14 @@ use clap::Args; use console::style; #[derive(Debug, Args)] -pub struct WhoAmICommand {} +pub struct WhoAmICommand; impl WhoAmICommand { pub async fn run(self, index_url: gix::Url, reqwest: reqwest::Client) -> anyhow::Result<()> { let tokens = get_tokens().await?; - let token = match tokens.0.get(&index_url) { - Some(token) => token, - None => { - println!("not logged in into {index_url}"); - return Ok(()); - } + let Some(token) = tokens.0.get(&index_url) else { + println!("not logged in into {index_url}"); + return Ok(()); }; println!( diff --git a/src/cli/commands/cas/prune.rs 
b/src/cli/commands/cas/prune.rs index 6098776..1e95a6c 100644 --- a/src/cli/commands/cas/prune.rs +++ b/src/cli/commands/cas/prune.rs @@ -5,11 +5,11 @@ use crate::{ }, util::remove_empty_dir, }; -use anyhow::Context; +use anyhow::Context as _; use async_stream::try_stream; use clap::Args; use fs_err::tokio as fs; -use futures::{future::BoxFuture, FutureExt, Stream, StreamExt}; +use futures::{future::BoxFuture, FutureExt as _, Stream, StreamExt as _}; use pesde::{ source::fs::{FsEntry, PackageFs}, Project, @@ -22,7 +22,7 @@ use std::{ use tokio::task::JoinSet; #[derive(Debug, Args)] -pub struct PruneCommand {} +pub struct PruneCommand; async fn read_dir_stream( dir: &Path, @@ -47,7 +47,7 @@ async fn get_nlinks(path: &Path) -> anyhow::Result { // life if rust stabilized the nightly feature from 2019 #[cfg(windows)] { - use std::os::windows::ffi::OsStrExt; + use std::os::windows::ffi::OsStrExt as _; use windows::{ core::PWSTR, Win32::{ @@ -139,7 +139,7 @@ async fn discover_cas_packages(cas_dir: &Path) -> anyhow::Result {} Err(e) => return Err(e.into()), - }; + } let mut manifest = toml_edit::DocumentMut::new(); @@ -232,8 +232,8 @@ impl InitCommand { anyhow::bail!("scripts package has no scripts.") }; - let scripts_field = &mut manifest["scripts"] - .or_insert(toml_edit::Item::Table(toml_edit::Table::new())); + let scripts_field = + manifest["scripts"].or_insert(toml_edit::Item::Table(toml_edit::Table::new())); for script_name in scripts.keys() { scripts_field[script_name] = toml_edit::value(format!( @@ -241,7 +241,7 @@ impl InitCommand { )); } - let dev_deps = &mut manifest["dev_dependencies"] + let dev_deps = manifest["dev_dependencies"] .or_insert(toml_edit::Item::Table(toml_edit::Table::new())); let field = &mut dev_deps["scripts"]; diff --git a/src/cli/commands/list.rs b/src/cli/commands/list.rs index 8c376a2..7f66035 100644 --- a/src/cli/commands/list.rs +++ b/src/cli/commands/list.rs @@ -1,6 +1,6 @@ use std::collections::BTreeMap; -use anyhow::Context; +use anyhow::Context as _; use clap::Args; use crate::cli::{ @@ -14,7 +14,7 @@ use pesde::{ }; #[derive(Debug, Args)] -pub struct ListCommand {} +pub struct ListCommand; impl ListCommand { pub async fn run(self, project: Project) -> anyhow::Result<()> { diff --git a/src/cli/commands/outdated.rs b/src/cli/commands/outdated.rs index 0c19ced..dc0f95c 100644 --- a/src/cli/commands/outdated.rs +++ b/src/cli/commands/outdated.rs @@ -2,12 +2,12 @@ use crate::cli::{ style::{ADDED_STYLE, INFO_STYLE, REMOVED_STYLE, SUCCESS_STYLE}, up_to_date_lockfile, }; -use anyhow::Context; +use anyhow::Context as _; use clap::Args; use pesde::{ source::{ specifiers::DependencySpecifiers, - traits::{PackageRef, PackageSource, RefreshOptions, ResolveOptions}, + traits::{PackageRef as _, PackageSource as _, RefreshOptions, ResolveOptions}, }, Project, RefreshedSources, }; @@ -82,7 +82,7 @@ impl OutdatedCommand { DependencySpecifiers::Git(_) => {} DependencySpecifiers::Workspace(_) => {} DependencySpecifiers::Path(_) => {} - }; + } } let new_id = source diff --git a/src/cli/commands/patch.rs b/src/cli/commands/patch.rs index cb9e6e7..5794b18 100644 --- a/src/cli/commands/patch.rs +++ b/src/cli/commands/patch.rs @@ -4,7 +4,7 @@ use crate::cli::{ style::{CLI_STYLE, INFO_STYLE, WARN_PREFIX}, up_to_date_lockfile, VersionedPackageName, }; -use anyhow::Context; +use anyhow::Context as _; use clap::Args; use console::style; use fs_err::tokio as fs; @@ -12,7 +12,7 @@ use pesde::{ patches::setup_patches_repo, source::{ refs::PackageRefs, - traits::{DownloadOptions, 
PackageRef, PackageSource}, + traits::{DownloadOptions, PackageRef as _, PackageSource as _}, }, Project, MANIFEST_FILE_NAME, }; @@ -71,9 +71,9 @@ impl PatchCommand { setup_patches_repo(&directory)?; println!( - r#"done! modify the files in the directory, then run {} {}{} to apply. + r"done! modify the files in the directory, then run {} {}{} to apply. {WARN_PREFIX}: do not commit these changes -{}: the {MANIFEST_FILE_NAME} file will be ignored when patching"#, +{}: the {MANIFEST_FILE_NAME} file will be ignored when patching", CLI_STYLE.apply_to(concat!("`", env!("CARGO_BIN_NAME"), " patch-commit")), style(format!("'{}'", directory.display())).cyan().bold(), CLI_STYLE.apply_to("`"), diff --git a/src/cli/commands/patch_commit.rs b/src/cli/commands/patch_commit.rs index ec04737..7da0ec4 100644 --- a/src/cli/commands/patch_commit.rs +++ b/src/cli/commands/patch_commit.rs @@ -1,5 +1,5 @@ use crate::cli::up_to_date_lockfile; -use anyhow::Context; +use anyhow::Context as _; use clap::Args; use fs_err::tokio as fs; use pesde::{ @@ -8,7 +8,7 @@ use pesde::{ source::ids::{PackageId, VersionId}, Project, }; -use std::{path::PathBuf, str::FromStr}; +use std::{path::PathBuf, str::FromStr as _}; #[derive(Debug, Args)] pub struct PatchCommitCommand { diff --git a/src/cli/commands/publish.rs b/src/cli/commands/publish.rs index ae48745..b6c24a4 100644 --- a/src/cli/commands/publish.rs +++ b/src/cli/commands/publish.rs @@ -3,7 +3,7 @@ use crate::cli::{ style::{ERROR_PREFIX, ERROR_STYLE, SUCCESS_STYLE, WARN_PREFIX}, up_to_date_lockfile, }; -use anyhow::Context; +use anyhow::Context as _; use async_compression::Level; use clap::Args; use console::style; @@ -15,7 +15,9 @@ use pesde::{ source::{ pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource}, specifiers::DependencySpecifiers, - traits::{GetTargetOptions, PackageRef, PackageSource, RefreshOptions, ResolveOptions}, + traits::{ + GetTargetOptions, PackageRef as _, PackageSource as _, RefreshOptions, ResolveOptions, + }, workspace::{ specifier::{VersionType, VersionTypeOrReq}, WorkspacePackageSource, @@ -24,12 +26,17 @@ use pesde::{ }, Project, RefreshedSources, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME, }; +use relative_path::RelativePath; use reqwest::{header::AUTHORIZATION, StatusCode}; use semver::VersionReq; -use std::{path::PathBuf, sync::Arc}; +use std::{ + collections::{BTreeMap, BTreeSet}, + path::PathBuf, + sync::Arc, +}; use tempfile::Builder; use tokio::{ - io::{AsyncSeekExt, AsyncWriteExt}, + io::{AsyncSeekExt as _, AsyncWriteExt as _}, task::JoinSet, }; @@ -105,7 +112,7 @@ impl PublishCommand { if manifest.target.lib_path().is_none() && manifest.target.bin_path().is_none() - && manifest.target.scripts().is_none_or(|s| s.is_empty()) + && manifest.target.scripts().is_none_or(BTreeMap::is_empty) { anyhow::bail!("no exports found in target"); } @@ -114,7 +121,7 @@ impl PublishCommand { manifest.target, Target::Roblox { .. } | Target::RobloxServer { .. 
} ) { - if manifest.target.build_files().is_none_or(|f| f.is_empty()) { + if manifest.target.build_files().is_none_or(BTreeSet::is_empty) { anyhow::bail!("no build files found in target"); } @@ -193,8 +200,14 @@ impl PublishCommand { let mut display_build_files: Vec = vec![]; let (lib_path, bin_path, scripts, target_kind) = ( - manifest.target.lib_path().map(|p| p.to_relative_path_buf()), - manifest.target.bin_path().map(|p| p.to_relative_path_buf()), + manifest + .target + .lib_path() + .map(RelativePath::to_relative_path_buf), + manifest + .target + .bin_path() + .map(RelativePath::to_relative_path_buf), manifest.target.scripts().cloned(), manifest.target.kind(), ); @@ -207,7 +220,7 @@ impl PublishCommand { let mut paths = matching_globs( project.package_dir(), - manifest.includes.iter().map(|s| s.as_str()), + manifest.includes.iter().map(String::as_str), true, false, ) @@ -247,13 +260,13 @@ impl PublishCommand { path.display(), ScriptName::RobloxSyncConfigGenerator ); - } else { - anyhow::bail!( - "forbidden file {} was included at `{}`", - file_name.to_string_lossy(), - path.display() - ); } + + anyhow::bail!( + "forbidden file {} was included at `{}`", + file_name.to_string_lossy(), + path.display() + ); } } @@ -263,9 +276,9 @@ impl PublishCommand { .any(|ct| ct == std::path::Component::Normal(ignored_path.as_ref())) }) { anyhow::bail!( - r#"forbidden file {ignored_path} was included. + r"forbidden file {ignored_path} was included. info: if this was a toolchain manager's manifest file, do not include it due to it possibly messing with user scripts -info: otherwise, the file was deemed unnecessary, if you don't understand why, please contact the maintainers"#, +info: otherwise, the file was deemed unnecessary, if you don't understand why, please contact the maintainers", ); } } @@ -301,9 +314,8 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p .next() .with_context(|| format!("{name} must contain at least one part"))?; - let first_part = match first_part { - relative_path::Component::Normal(part) => part, - _ => anyhow::bail!("{name} must be within project directory"), + let relative_path::Component::Normal(first_part) = first_part else { + anyhow::bail!("{name} must be within project directory"); }; if paths.insert( @@ -483,8 +495,10 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p VersionReq::STAR } VersionTypeOrReq::Req(r) => r, - v => VersionReq::parse(&format!("{v}{}", manifest.version)) - .with_context(|| format!("failed to parse version for {v}"))?, + VersionTypeOrReq::VersionType(v) => { + VersionReq::parse(&format!("{v}{}", manifest.version)) + .with_context(|| format!("failed to parse version for {v}"))? 
+ } }, index: manifest .indices @@ -528,8 +542,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p manifest .repository .as_ref() - .map(|r| r.as_str()) - .unwrap_or("(none)") + .map_or("(none)", url::Url::as_str) ); let roblox_target = roblox_target.is_some_and(|_| true); @@ -540,7 +553,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p manifest .target .lib_path() - .map_or_else(|| "(none)".to_string(), |p| p.to_string()) + .map_or_else(|| "(none)".to_string(), ToString::to_string) ); if roblox_target { @@ -551,7 +564,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p manifest .target .bin_path() - .map_or_else(|| "(none)".to_string(), |p| p.to_string()) + .map_or_else(|| "(none)".to_string(), ToString::to_string) ); println!( "\tscripts: {}", @@ -706,10 +719,10 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p .await; if project.workspace_dir().is_some() { return result; - } else { - display_err(result, " occurred publishing workspace root"); } + display_err(result, " occurred publishing workspace root"); + run_on_workspace_members(&project, |project| { let reqwest = reqwest.clone(); let this = self.clone(); diff --git a/src/cli/commands/remove.rs b/src/cli/commands/remove.rs index 3d9ab5f..e27eabc 100644 --- a/src/cli/commands/remove.rs +++ b/src/cli/commands/remove.rs @@ -1,6 +1,6 @@ -use std::str::FromStr; +use std::str::FromStr as _; -use anyhow::Context; +use anyhow::Context as _; use clap::Args; use crate::cli::{ diff --git a/src/cli/commands/run.rs b/src/cli/commands/run.rs index ae76421..df34e25 100644 --- a/src/cli/commands/run.rs +++ b/src/cli/commands/run.rs @@ -1,18 +1,18 @@ use crate::cli::up_to_date_lockfile; -use anyhow::Context; +use anyhow::Context as _; use clap::Args; -use futures::{StreamExt, TryStreamExt}; +use futures::{StreamExt as _, TryStreamExt as _}; use pesde::{ errors::{ManifestReadError, WorkspaceMembersError}, linking::generator::generate_bin_linking_module, names::{PackageName, PackageNames}, - source::traits::{GetTargetOptions, PackageRef, PackageSource, RefreshOptions}, + source::traits::{GetTargetOptions, PackageRef as _, PackageSource as _, RefreshOptions}, Project, MANIFEST_FILE_NAME, }; use relative_path::RelativePathBuf; use std::{ - collections::HashSet, env::current_dir, ffi::OsString, io::Write, path::Path, process::Command, - sync::Arc, + collections::HashSet, env::current_dir, ffi::OsString, io::Write as _, path::Path, + process::Command, sync::Arc, }; #[derive(Debug, Args)] @@ -51,7 +51,7 @@ impl RunCommand { drop(caller); - std::process::exit(status.code().unwrap_or(1)) + std::process::exit(status.code().unwrap_or(1i32)) }; let Some(package_or_script) = self.package_or_script else { @@ -133,7 +133,7 @@ impl RunCommand { ); return Ok(()); } - }; + } let relative_path = RelativePathBuf::from(package_or_script); let path = relative_path.to_path(project.package_dir()); @@ -158,8 +158,10 @@ impl RunCommand { let members = members .map(|res| { - res.map_err(anyhow::Error::from) - .and_then(|(path, _)| path.canonicalize().map_err(Into::into)) + res.map_err(anyhow::Error::from)? 
+ .0 + .canonicalize() + .map_err(anyhow::Error::from) }) .chain(futures::stream::once(async { workspace_dir.canonicalize().map_err(Into::into) @@ -169,7 +171,7 @@ impl RunCommand { .context("failed to collect workspace members")?; let root = 'finder: { - let mut current_path = path.to_path_buf(); + let mut current_path = path.clone(); loop { let canonical_path = current_path .canonicalize() diff --git a/src/cli/commands/self_install.rs b/src/cli/commands/self_install.rs index 5e30f97..2fe546d 100644 --- a/src/cli/commands/self_install.rs +++ b/src/cli/commands/self_install.rs @@ -3,7 +3,7 @@ use crate::cli::{ version::replace_pesde_bin_exe, HOME_DIR, }; -use anyhow::Context; +use anyhow::Context as _; use clap::Args; use console::style; use std::env::current_exe; @@ -22,15 +22,16 @@ impl SelfInstallCommand { { if !self.skip_add_to_path { use crate::cli::style::WARN_STYLE; - use anyhow::Context; + use anyhow::Context as _; use windows_registry::CURRENT_USER; + let bin_dir = crate::cli::bin_dir().await?; + let env = CURRENT_USER .create("Environment") .context("failed to open Environment key")?; let path = env.get_string("Path").context("failed to get Path value")?; - let bin_dir = crate::cli::bin_dir().await?; let bin_dir = bin_dir.to_string_lossy(); let exists = path.split(';').any(|part| *part == bin_dir); @@ -53,7 +54,7 @@ impl SelfInstallCommand { CLI_STYLE.apply_to(env!("CARGO_BIN_NAME")), ADDED_STYLE.apply_to(env!("CARGO_PKG_VERSION")), ); - } + }; #[cfg(unix)] { @@ -68,7 +69,7 @@ and then restart your shell. ADDED_STYLE.apply_to(env!("CARGO_PKG_VERSION")), style(format!(r#"export PATH="$PATH:$HOME/{HOME_DIR}/bin""#)).green(), ); - } + }; replace_pesde_bin_exe(¤t_exe().context("failed to get current exe path")?).await?; diff --git a/src/cli/commands/self_upgrade.rs b/src/cli/commands/self_upgrade.rs index 2578f8b..b96adcc 100644 --- a/src/cli/commands/self_upgrade.rs +++ b/src/cli/commands/self_upgrade.rs @@ -8,7 +8,7 @@ use crate::{ }, util::no_build_metadata, }; -use anyhow::Context; +use anyhow::Context as _; use clap::Args; use pesde::engine::EngineKind; use semver::VersionReq; diff --git a/src/cli/commands/yank.rs b/src/cli/commands/yank.rs index 562df80..f435c75 100644 --- a/src/cli/commands/yank.rs +++ b/src/cli/commands/yank.rs @@ -1,12 +1,12 @@ use crate::cli::{get_index, style::SUCCESS_STYLE}; -use anyhow::Context; +use anyhow::Context as _; use clap::Args; use pesde::{ manifest::target::TargetKind, names::PackageName, source::{ pesde::PesdePackageSource, - traits::{PackageSource, RefreshOptions}, + traits::{PackageSource as _, RefreshOptions}, }, Project, }; diff --git a/src/cli/config.rs b/src/cli/config.rs index 8083e54..d1da51e 100644 --- a/src/cli/config.rs +++ b/src/cli/config.rs @@ -1,5 +1,5 @@ use crate::cli::{auth::Tokens, home_dir}; -use anyhow::Context; +use anyhow::Context as _; use fs_err::tokio as fs; use serde::{Deserialize, Serialize}; use tracing::instrument; diff --git a/src/cli/install.rs b/src/cli/install.rs index c2d681e..856aca9 100644 --- a/src/cli/install.rs +++ b/src/cli/install.rs @@ -6,7 +6,7 @@ use crate::cli::{ style::{ADDED_STYLE, REMOVED_STYLE, WARN_PREFIX}, up_to_date_lockfile, }; -use anyhow::Context; +use anyhow::Context as _; use console::style; use fs_err::tokio as fs; use pesde::{ @@ -19,7 +19,7 @@ use pesde::{ source::{ pesde::PesdePackageSource, refs::PackageRefs, - traits::{PackageRef, RefreshOptions}, + traits::{PackageRef as _, RefreshOptions}, PackageSources, }, version_matches, Project, RefreshedSources, LOCKFILE_FILE_NAME, 
MANIFEST_FILE_NAME, @@ -452,8 +452,8 @@ pub async fn install( print_package_diff( &format!("{} {}:", manifest.name, manifest.target), - old_graph, - new_lockfile.graph, + &old_graph, + &new_lockfile.graph, ); println!("done in {:.2}s", elapsed.as_secs_f64()); @@ -463,20 +463,20 @@ pub async fn install( } /// Prints the difference between two graphs. -pub fn print_package_diff(prefix: &str, old_graph: DependencyGraph, new_graph: DependencyGraph) { +pub fn print_package_diff(prefix: &str, old_graph: &DependencyGraph, new_graph: &DependencyGraph) { let mut old_pkg_map = BTreeMap::new(); let mut old_direct_pkg_map = BTreeMap::new(); let mut new_pkg_map = BTreeMap::new(); let mut new_direct_pkg_map = BTreeMap::new(); - for (id, node) in &old_graph { + for (id, node) in old_graph { old_pkg_map.insert(id, node); if node.direct.is_some() { old_direct_pkg_map.insert(id, node); } } - for (id, node) in &new_graph { + for (id, node) in new_graph { new_pkg_map.insert(id, node); if node.direct.is_some() { new_direct_pkg_map.insert(id, node); diff --git a/src/cli/mod.rs b/src/cli/mod.rs index 8eb238b..bcc4a3d 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -2,9 +2,9 @@ use crate::cli::{ config::read_config, style::{ERROR_STYLE, INFO_STYLE, WARN_STYLE}, }; -use anyhow::Context; +use anyhow::Context as _; use fs_err::tokio as fs; -use futures::StreamExt; +use futures::StreamExt as _; use pesde::{ errors::ManifestReadError, lockfile::Lockfile, @@ -135,11 +135,7 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result anyhow::Result { - let version_id = match self.1 { - Some(version) => version, - None => { - let versions = graph - .keys() - .filter(|id| *id.name() == self.0) - .collect::>(); + let version_id = if let Some(version) = self.1 { + version + } else { + let versions = graph + .keys() + .filter(|id| *id.name() == self.0) + .collect::>(); - match versions.len() { - 0 => anyhow::bail!("package not found"), - 1 => versions[0].version_id().clone(), - _ => anyhow::bail!( - "multiple versions found, please specify one of: {}", - versions - .iter() - .map(|v| v.to_string()) - .collect::>() - .join(", ") - ), - } + match versions.len() { + 0 => anyhow::bail!("package not found"), + 1 => versions[0].version_id().clone(), + _ => anyhow::bail!( + "multiple versions found, please specify one of: {}", + versions + .iter() + .map(ToString::to_string) + .collect::>() + .join(", ") + ), } }; @@ -336,18 +331,17 @@ pub async fn get_index(project: &Project, index: Option<&str>) -> anyhow::Result }, }; - match index_url { - Some(url) => Ok(url), - None => { - let index_name = index.unwrap_or(DEFAULT_INDEX_NAME); - - manifest - .unwrap() - .indices - .remove(index_name) - .with_context(|| format!("index {index_name} not found in manifest")) - } + if let Some(url) = index_url { + return Ok(url); } + + let index_name = index.unwrap_or(DEFAULT_INDEX_NAME); + + manifest + .unwrap() + .indices + .remove(index_name) + .with_context(|| format!("index {index_name} not found in manifest")) } pub fn dep_type_to_key(dep_type: DependencyType) -> &'static str { diff --git a/src/cli/reporters.rs b/src/cli/reporters.rs index 4f7bdf4..5dacda1 100644 --- a/src/cli/reporters.rs +++ b/src/cli/reporters.rs @@ -187,7 +187,7 @@ impl PatchesReporter for CliReporter { CliPatchProgressReporter { root_reporter: self, - name: name.to_string(), + name, progress, } } diff --git a/src/cli/version.rs b/src/cli/version.rs index 43056e7..f84ad81 100644 --- a/src/cli/version.rs +++ b/src/cli/version.rs @@ -9,19 +9,19 @@ use 
crate::{ }, util::no_build_metadata, }; -use anyhow::Context; +use anyhow::Context as _; use console::Style; use fs_err::tokio as fs; use jiff::SignedDuration; use pesde::{ engine::{ source::{ - traits::{DownloadOptions, EngineSource, ResolveOptions}, + traits::{DownloadOptions, EngineSource as _, ResolveOptions}, EngineSources, }, EngineKind, }, - reporters::DownloadsReporter, + reporters::DownloadsReporter as _, version_matches, }; use semver::{Version, VersionReq}; @@ -167,7 +167,7 @@ pub async fn get_or_download_engine( .with_extension(std::env::consts::EXE_EXTENSION)); } - let path = run_with_reporter(|_, root_progress, reporter| async { + run_with_reporter(|_, root_progress, reporter| async { let root_progress = root_progress; let reporter = reporter; @@ -221,19 +221,17 @@ pub async fn get_or_download_engine( .await .context("failed to write to file")?; + make_executable(&path) + .await + .context("failed to make downloaded version executable")?; + + if engine != EngineKind::Pesde { + make_linker_if_needed(engine).await?; + } + Ok::<_, anyhow::Error>(path) }) - .await?; - - make_executable(&path) - .await - .context("failed to make downloaded version executable")?; - - if engine != EngineKind::Pesde { - make_linker_if_needed(engine).await?; - } - - Ok(path) + .await } #[instrument(level = "trace")] @@ -243,7 +241,7 @@ pub async fn replace_pesde_bin_exe(with: &Path) -> anyhow::Result<()> { .join(EngineKind::Pesde.to_string()) .with_extension(std::env::consts::EXE_EXTENSION); - let exists = bin_exe_path.exists(); + let exists = fs::metadata(&bin_exe_path).await.is_ok(); if cfg!(target_os = "linux") && exists { fs::remove_file(&bin_exe_path) @@ -277,9 +275,8 @@ pub async fn make_linker_if_needed(engine: EngineKind) -> anyhow::Result<()> { let linker = bin_dir .join(engine.to_string()) .with_extension(std::env::consts::EXE_EXTENSION); - let exists = linker.exists(); - if !exists { + if fs::metadata(&linker).await.is_err() { let exe = current_exe().context("failed to get current exe path")?; #[cfg(windows)] diff --git a/src/download.rs b/src/download.rs index 3ced222..181a5c8 100644 --- a/src/download.rs +++ b/src/download.rs @@ -1,10 +1,10 @@ use crate::{ graph::{DependencyGraph, DependencyGraphNode}, - reporters::{DownloadProgressReporter, DownloadsReporter}, + reporters::{DownloadProgressReporter as _, DownloadsReporter}, source::{ fs::PackageFs, ids::PackageId, - traits::{DownloadOptions, PackageRef, PackageSource, RefreshOptions}, + traits::{DownloadOptions, PackageRef as _, PackageSource as _, RefreshOptions}, }, Project, RefreshedSources, }; @@ -12,7 +12,7 @@ use async_stream::try_stream; use futures::Stream; use std::{num::NonZeroUsize, sync::Arc}; use tokio::{sync::Semaphore, task::JoinSet}; -use tracing::{instrument, Instrument}; +use tracing::{instrument, Instrument as _}; /// Options for downloading. 
#[derive(Debug)] @@ -116,7 +116,7 @@ impl Project { let _permit = semaphore.acquire().await; - if let Some(ref progress_reporter) = progress_reporter { + if let Some(progress_reporter) = &progress_reporter { progress_reporter.report_start(); } diff --git a/src/download_and_link.rs b/src/download_and_link.rs index cb2ad35..a696b2e 100644 --- a/src/download_and_link.rs +++ b/src/download_and_link.rs @@ -9,12 +9,12 @@ use crate::{ reporters::{DownloadsReporter, PatchesReporter}, source::{ ids::PackageId, - traits::{GetTargetOptions, PackageRef, PackageSource}, + traits::{GetTargetOptions, PackageRef as _, PackageSource as _}, }, Project, RefreshedSources, SCRIPTS_LINK_FOLDER, }; use fs_err::tokio as fs; -use futures::TryStreamExt; +use futures::TryStreamExt as _; use std::{ collections::HashMap, convert::Infallible, @@ -24,11 +24,11 @@ use std::{ sync::Arc, }; use tokio::{pin, task::JoinSet}; -use tracing::{instrument, Instrument}; +use tracing::{instrument, Instrument as _}; /// Hooks to perform actions after certain events during download and linking. #[allow(unused_variables)] -pub trait DownloadAndLinkHooks { +pub trait DownloadAndLinkHooks: Send + Sync { /// The error type for the hooks. type Error: std::error::Error + Send + Sync + 'static; @@ -89,6 +89,7 @@ where Hooks: DownloadAndLinkHooks + Send + Sync + 'static, { /// Creates a new download options with the given reqwest client and reporter. + #[must_use] pub fn new(reqwest: reqwest::Client) -> Self { Self { reqwest, @@ -102,36 +103,42 @@ where } /// Sets the downloads reporter. + #[must_use] pub fn reporter(mut self, reporter: impl Into>) -> Self { self.reporter.replace(reporter.into()); self } /// Sets the download and link hooks. + #[must_use] pub fn hooks(mut self, hooks: impl Into>) -> Self { self.hooks.replace(hooks.into()); self } /// Sets the refreshed sources. + #[must_use] pub fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self { self.refreshed_sources = refreshed_sources; self } /// Sets whether to skip dev dependencies. + #[must_use] pub fn prod(mut self, prod: bool) -> Self { self.prod = prod; self } /// Sets the max number of concurrent network requests. 
+ #[must_use] pub fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self { self.network_concurrency = network_concurrency; self } /// Sets whether to re-install all dependencies even if they are already installed + #[must_use] pub fn force(mut self, force: bool) -> Self { self.force = force; self @@ -294,7 +301,7 @@ impl Project { node.container_folder_from_project(&id, project, manifest_target_kind) .as_path(), ); - let id = Arc::new(id.clone()); + let id = Arc::new(id); let project = project.clone(); async move { @@ -311,7 +318,7 @@ impl Project { Ok::<_, errors::DownloadAndLinkError>(( Arc::into_inner(id).unwrap(), - DependencyGraphNodeWithTarget { node, target }, + DependencyGraphNodeWithTarget { target, node }, )) } }) diff --git a/src/engine/mod.rs b/src/engine/mod.rs index 82e059d..eae4a03 100644 --- a/src/engine/mod.rs +++ b/src/engine/mod.rs @@ -39,7 +39,8 @@ impl FromStr for EngineKind { impl EngineKind { /// Returns the source to get this engine from - pub fn source(&self) -> EngineSources { + #[must_use] + pub fn source(self) -> EngineSources { match self { EngineKind::Pesde => EngineSources::pesde(), EngineKind::Lune => EngineSources::lune(), diff --git a/src/engine/source/archive.rs b/src/engine/source/archive.rs index 28a2476..bda3a49 100644 --- a/src/engine/source/archive.rs +++ b/src/engine/source/archive.rs @@ -1,4 +1,4 @@ -use futures::StreamExt; +use futures::StreamExt as _; use std::{ collections::BTreeSet, mem::ManuallyDrop, @@ -8,10 +8,10 @@ use std::{ task::{Context, Poll}, }; use tokio::{ - io::{AsyncBufRead, AsyncRead, AsyncReadExt, ReadBuf}, + io::{AsyncBufRead, AsyncRead, AsyncReadExt as _, ReadBuf}, pin, }; -use tokio_util::compat::{Compat, FuturesAsyncReadCompatExt}; +use tokio_util::compat::{Compat, FuturesAsyncReadCompatExt as _}; /// The kind of encoding used for the archive #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -53,7 +53,7 @@ impl FromStr for ArchiveInfo { } } -pub(crate) type ArchiveReader = Pin>; +pub(crate) type ArchiveReader = Pin>; /// An archive pub struct Archive { @@ -84,9 +84,9 @@ impl AsyncRead for TarReader { } enum ArchiveEntryInner { - Tar(tokio_tar::Entry>), + Tar(Box>>), Zip { - archive: *mut async_zip::tokio::read::seek::ZipFileReader>>, + archive: ArchivePointer, reader: ManuallyDrop< Compat< async_zip::tokio::read::ZipEntryReader< @@ -105,7 +105,7 @@ impl Drop for ArchiveEntryInner { Self::Tar(_) => {} Self::Zip { archive, reader } => unsafe { ManuallyDrop::drop(reader); - drop(Box::from_raw(*archive)); + drop(Box::from_raw(archive.0)); }, } } @@ -131,6 +131,10 @@ impl AsyncRead for ArchiveEntry { } } +struct ArchivePointer(*mut async_zip::tokio::read::seek::ZipFileReader>>); + +unsafe impl Send for ArchivePointer {} + impl Archive { /// Finds the executable in the archive and returns it as an [`ArchiveEntry`] pub async fn find_executable( @@ -226,7 +230,7 @@ impl Archive { let path = entry.path()?; if path == candidate.path { - return Ok(ArchiveEntry(ArchiveEntryInner::Tar(entry))); + return Ok(ArchiveEntry(ArchiveEntryInner::Tar(Box::new(entry)))); } } } @@ -269,8 +273,8 @@ impl Archive { let path: &Path = entry.filename().as_str()?.as_ref(); if candidate.path == path { - let ptr = Box::into_raw(Box::new(archive)); - let reader = (unsafe { &mut *ptr }).reader_without_entry(i).await?; + let ptr = ArchivePointer(Box::into_raw(Box::new(archive))); + let reader = (unsafe { &mut *ptr.0 }).reader_without_entry(i).await?; return Ok(ArchiveEntry(ArchiveEntryInner::Zip { archive: ptr, reader: 
ManuallyDrop::new(reader.compat()), diff --git a/src/engine/source/mod.rs b/src/engine/source/mod.rs index b8a8a67..cc8a50b 100644 --- a/src/engine/source/mod.rs +++ b/src/engine/source/mod.rs @@ -83,6 +83,7 @@ impl EngineSource for EngineSources { impl EngineSources { /// Returns the source for the pesde engine + #[must_use] pub fn pesde() -> Self { let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3); let (owner, repo) = ( @@ -102,6 +103,7 @@ impl EngineSources { } /// Returns the source for the lune engine + #[must_use] pub fn lune() -> Self { EngineSources::GitHub(github::GitHubEngineSource { owner: "lune-org".into(), diff --git a/src/graph.rs b/src/graph.rs index e49a1e7..b326abc 100644 --- a/src/graph.rs +++ b/src/graph.rs @@ -7,7 +7,7 @@ use crate::{ ids::{PackageId, VersionId}, refs::PackageRefs, specifiers::DependencySpecifiers, - traits::PackageRef, + traits::PackageRef as _, }, Project, PACKAGES_CONTAINER_NAME, }; @@ -49,6 +49,7 @@ impl DependencyGraphNode { } /// Returns the folder to store the contents of the package in + #[must_use] pub fn container_folder(&self, package_id: &PackageId) -> PathBuf { let (name, v_id) = package_id.parts(); @@ -68,6 +69,7 @@ impl DependencyGraphNode { } /// Returns the folder to store the contents of the package in starting from the project's package directory + #[must_use] pub fn container_folder_from_project( &self, package_id: &PackageId, diff --git a/src/lib.rs b/src/lib.rs index f9af0e1..2f4df21 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,5 @@ -#![warn(missing_docs, clippy::redundant_closure_for_method_calls)] +#![warn(missing_docs)] +#![deny(clippy::future_not_send)] //! A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune. //! pesde has its own registry, however it can also use Wally, and Git repositories as package sources. //! It has been designed with multiple targets in mind, namely Roblox, Lune, and Luau. 
@@ -7,7 +8,7 @@ use crate::{ lockfile::Lockfile, manifest::{target::TargetKind, Manifest}, source::{ - traits::{PackageSource, RefreshOptions}, + traits::{PackageSource as _, RefreshOptions}, PackageSources, }, }; @@ -19,12 +20,12 @@ use semver::{Version, VersionReq}; use std::{ collections::{HashMap, HashSet}, fmt::Debug, - hash::{Hash, Hasher}, + hash::{Hash as _, Hasher as _}, path::{Path, PathBuf}, sync::Arc, }; use tracing::instrument; -use wax::Pattern; +use wax::Pattern as _; /// Downloading packages pub mod download; @@ -84,12 +85,14 @@ pub struct AuthConfig { impl AuthConfig { /// Create a new `AuthConfig` + #[must_use] pub fn new() -> Self { AuthConfig::default() } /// Set the tokens /// Panics if the `AuthConfig` is shared + #[must_use] pub fn with_tokens, S: AsRef>( mut self, tokens: I, @@ -103,17 +106,20 @@ impl AuthConfig { /// Set the git credentials /// Panics if the `AuthConfig` is shared + #[must_use] pub fn with_git_credentials(mut self, git_credentials: Option) -> Self { Arc::get_mut(&mut self.shared).unwrap().git_credentials = git_credentials; self } /// Get the tokens + #[must_use] pub fn tokens(&self) -> &HashMap { &self.shared.tokens } /// Get the git credentials + #[must_use] pub fn git_credentials(&self) -> Option<&Account> { self.shared.git_credentials.as_ref() } @@ -137,6 +143,7 @@ pub struct Project { impl Project { /// Create a new `Project` + #[must_use] pub fn new( package_dir: impl AsRef, workspace_dir: Option>, @@ -156,26 +163,31 @@ impl Project { } /// The directory of the package + #[must_use] pub fn package_dir(&self) -> &Path { &self.shared.package_dir } /// The directory of the workspace this package belongs to, if any + #[must_use] pub fn workspace_dir(&self) -> Option<&Path> { self.shared.workspace_dir.as_deref() } /// The directory to store general-purpose data + #[must_use] pub fn data_dir(&self) -> &Path { &self.shared.data_dir } /// The CAS (content-addressable storage) directory + #[must_use] pub fn cas_dir(&self) -> &Path { &self.shared.cas_dir } /// The authentication configuration + #[must_use] pub fn auth_config(&self) -> &AuthConfig { &self.shared.auth_config } @@ -324,7 +336,7 @@ pub async fn matching_globs<'a, P: AsRef + Debug, I: IntoIterator>>); impl RefreshedSources { /// Create a new empty `RefreshedSources` + #[must_use] pub fn new() -> Self { RefreshedSources::default() } @@ -418,9 +431,9 @@ pub async fn find_roots( if get_workspace_members(&path).await?.contains(&cwd) { // initializing a new member of a workspace return Ok((cwd, Some(path))); - } else { - project_root = Some(path); } + + project_root = Some(path); } (None, Some(_)) => unreachable!(), @@ -434,6 +447,7 @@ pub async fn find_roots( /// Returns whether a version matches a version requirement /// Differs from `VersionReq::matches` in that EVERY version matches `*` +#[must_use] pub fn version_matches(req: &VersionReq, version: &Version) -> bool { *req == VersionReq::STAR || req.matches(version) } diff --git a/src/linking/generator.rs b/src/linking/generator.rs index 5d9f7d3..1a8ecc9 100644 --- a/src/linking/generator.rs +++ b/src/linking/generator.rs @@ -18,13 +18,13 @@ impl Visitor for TypeVisitor { let mut declaration_generics = vec![]; let mut generics = vec![]; - for generic in declaration.generics().iter() { + for generic in declaration.generics() { declaration_generics.push(generic.to_string()); if generic.default_type().is_some() { - generics.push(generic.parameter().to_string()) + generics.push(generic.parameter().to_string()); } else { - 
generics.push(generic.to_string()) + generics.push(generic.to_string()); } } @@ -64,6 +64,7 @@ pub(crate) fn get_file_types(file: &str) -> Vec { } /// Generate a linking module for a library +#[must_use] pub fn generate_lib_linking_module, S: AsRef>( path: &str, types: I, @@ -119,6 +120,7 @@ fn luau_style_path(path: &Path) -> String { /// Get the require path for a library #[instrument(skip(project_manifest), level = "trace")] #[allow(clippy::too_many_arguments)] +#[must_use] pub fn get_lib_require_path( target: TargetKind, base_dir: &Path, @@ -182,26 +184,27 @@ pub fn get_lib_require_path( } _ => None, }) - .collect::>() - .join(""); + .collect::(); return Ok(format!("{prefix}{path}")); - }; + } Ok(luau_style_path(&path)) } /// Generate a linking module for a binary +#[must_use] pub fn generate_bin_linking_module>(package_root: P, require_path: &str) -> String { format!( - r#"_G.PESDE_ROOT = {:?} -return require({require_path})"#, + r"_G.PESDE_ROOT = {:?} +return require({require_path})", package_root.as_ref().to_string_lossy() ) } /// Get the require path for a binary #[instrument(level = "trace")] +#[must_use] pub fn get_bin_require_path( base_dir: &Path, bin_file: &RelativePath, @@ -215,12 +218,14 @@ pub fn get_bin_require_path( } /// Generate a linking module for a script +#[must_use] pub fn generate_script_linking_module(require_path: &str) -> String { - format!(r#"return require({require_path})"#) + format!(r"return require({require_path})") } /// Get the require path for a script #[instrument(level = "trace")] +#[must_use] pub fn get_script_require_path( base_dir: &Path, script_file: &RelativePath, diff --git a/src/linking/incremental.rs b/src/linking/incremental.rs index 4aa4d02..d1c6ef7 100644 --- a/src/linking/incremental.rs +++ b/src/linking/incremental.rs @@ -3,7 +3,7 @@ use crate::{ Project, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER, }; use fs_err::tokio as fs; -use futures::FutureExt; +use futures::FutureExt as _; use std::{ collections::HashSet, path::{Component, Path, PathBuf}, @@ -12,7 +12,7 @@ use std::{ use tokio::task::JoinSet; fn index_entry( - entry: fs::DirEntry, + entry: &fs::DirEntry, packages_index_dir: &Path, tasks: &mut JoinSet>, used_paths: &Arc>, @@ -54,13 +54,13 @@ fn index_entry( } #[cfg(feature = "wally-compat")] { - if !used_paths.contains(&path_relative) { - fs::remove_dir_all(path).await?; - } else { + if used_paths.contains(&path_relative) { #[cfg(feature = "patches")] if !patched_packages.contains(&path_relative) { crate::patches::remove_patch(path.join(package_name)).await?; } + } else { + fs::remove_dir_all(path).await?; } return Ok(()); @@ -104,7 +104,7 @@ fn packages_entry( ) { let expected_aliases = expected_aliases.clone(); tasks.spawn(async move { - if !entry.file_type().await?.is_file() { + if entry.file_type().await?.is_dir() { return Ok(()); } @@ -241,7 +241,7 @@ impl Project { tokio::select! 
{ Some(entry) = index_entries.next_entry().map(Result::transpose) => { index_entry( - entry?, + &entry?, &packages_index_dir, &mut tasks, &used_paths, diff --git a/src/linking/mod.rs b/src/linking/mod.rs index 804715c..daf4c7c 100644 --- a/src/linking/mod.rs +++ b/src/linking/mod.rs @@ -6,7 +6,7 @@ use crate::{ source::{ fs::{cas_path, store_in_cas}, ids::PackageId, - traits::PackageRef, + traits::PackageRef as _, }, Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER, }; @@ -18,7 +18,7 @@ use std::{ sync::Arc, }; use tokio::task::{spawn_blocking, JoinSet}; -use tracing::{instrument, Instrument}; +use tracing::{instrument, Instrument as _}; /// Generates linking modules for a project pub mod generator; @@ -41,7 +41,7 @@ async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std: #[cfg(windows)] Err(e) if e.kind() == std::io::ErrorKind::PermissionDenied => {} Err(e) => return Err(e), - }; + } fs::hard_link(cas_path(&hash, cas_dir), destination).await } @@ -102,7 +102,9 @@ impl Project { manifest_target_kind, ); - let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND { + let types = if lib_file.as_str() == LINK_LIB_NO_FILE_FOUND { + vec![] + } else { let lib_file = lib_file.to_path(&container_folder); let contents = match fs::read_to_string(&lib_file).await { @@ -122,8 +124,6 @@ impl Project { tracing::debug!("contains {} exported types", types.len()); types - } else { - vec![] }; if let Some(build_files) = Some(&node.target) diff --git a/src/lockfile.rs b/src/lockfile.rs index 157790c..1f30f20 100644 --- a/src/lockfile.rs +++ b/src/lockfile.rs @@ -107,6 +107,8 @@ pub mod old { impl LockfileOld { /// Converts this lockfile to a new lockfile + #[must_use] + #[allow(clippy::wrong_self_convention)] pub fn to_new(self) -> super::Lockfile { super::Lockfile { name: self.name, diff --git a/src/main.rs b/src/main.rs index bf3e540..60396d3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,7 +1,7 @@ #[cfg(feature = "version-management")] use crate::cli::version::{check_for_updates, current_version, get_or_download_engine}; use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR}; -use anyhow::Context; +use anyhow::Context as _; use clap::{builder::styling::AnsiColor, Parser}; use fs_err::tokio as fs; use indicatif::MultiProgress; @@ -10,13 +10,14 @@ use semver::VersionReq; use std::{ io, path::{Path, PathBuf}, - str::FromStr, + str::FromStr as _, sync::Mutex, }; use tempfile::NamedTempFile; use tracing::instrument; use tracing_subscriber::{ - filter::LevelFilter, fmt::MakeWriter, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, + filter::LevelFilter, fmt::MakeWriter, layer::SubscriberExt as _, util::SubscriberInitExt as _, + EnvFilter, }; mod cli; @@ -98,8 +99,8 @@ pub struct IndicatifWriter; impl IndicatifWriter { fn suspend R, R>(f: F) -> R { - match *PROGRESS_BARS.lock().unwrap() { - Some(ref progress_bars) => progress_bars.suspend(f), + match &*PROGRESS_BARS.lock().unwrap() { + Some(progress_bars) => progress_bars.suspend(f), None => f(), } } @@ -191,8 +192,8 @@ async fn run() -> anyhow::Result<()> { .status() .expect("failed to run lune"); - std::process::exit(status.code().unwrap_or(1)); - } + std::process::exit(status.code().unwrap_or(1i32)); + }; let tracing_env_filter = EnvFilter::builder() .with_default_directive(LevelFilter::INFO.into()) @@ -315,8 +316,8 @@ async fn run() -> anyhow::Result<()> { .status() .expect("failed to run new version"); - std::process::exit(status.code().unwrap_or(1)); - } + 
std::process::exit(status.code().unwrap_or(1i32)); + }; #[cfg(feature = "version-management")] display_err( diff --git a/src/manifest/mod.rs b/src/manifest/mod.rs index fdfa905..2a55a8b 100644 --- a/src/manifest/mod.rs +++ b/src/manifest/mod.rs @@ -155,7 +155,7 @@ impl Eq for Alias {} impl Hash for Alias { fn hash(&self, state: &mut H) { - self.0.to_lowercase().hash(state) + self.0.to_lowercase().hash(state); } } @@ -204,6 +204,7 @@ impl schemars::JsonSchema for Alias { impl Alias { /// Get the alias as a string + #[must_use] pub fn as_str(&self) -> &str { &self.0 } diff --git a/src/manifest/target.rs b/src/manifest/target.rs index 62421d3..100ddc6 100644 --- a/src/manifest/target.rs +++ b/src/manifest/target.rs @@ -59,6 +59,7 @@ impl TargetKind { /// The folder to store packages in for this target /// self is the project's target, dependency is the target of the dependency + #[must_use] pub fn packages_folder(self, dependency: Self) -> String { // the code below might seem better, but it's just going to create issues with users trying // to use a build script, since imports would break between targets @@ -71,7 +72,8 @@ impl TargetKind { } /// Returns whether this target is a Roblox target - pub fn is_roblox(&self) -> bool { + #[must_use] + pub fn is_roblox(self) -> bool { matches!(self, TargetKind::Roblox | TargetKind::RobloxServer) } } @@ -135,6 +137,7 @@ pub enum Target { impl Target { /// Returns the kind of this target + #[must_use] pub fn kind(&self) -> TargetKind { match self { Target::Roblox { .. } => TargetKind::Roblox, @@ -145,6 +148,7 @@ impl Target { } /// Returns the path to the lib export file + #[must_use] pub fn lib_path(&self) -> Option<&RelativePath> { match self { Target::Roblox { lib, .. } => lib.as_deref(), @@ -155,6 +159,7 @@ impl Target { } /// Returns the path to the bin export file + #[must_use] pub fn bin_path(&self) -> Option<&RelativePath> { match self { Target::Roblox { .. } => None, @@ -165,6 +170,7 @@ impl Target { } /// Returns the Roblox build files + #[must_use] pub fn build_files(&self) -> Option<&BTreeSet> { match self { Target::Roblox { build_files, .. } => Some(build_files), @@ -174,6 +180,7 @@ impl Target { } /// Returns the scripts exported by this target + #[must_use] pub fn scripts(&self) -> Option<&BTreeMap> { match self { Target::Lune { scripts, .. 
} => Some(scripts), diff --git a/src/names.rs b/src/names.rs index 31821e8..1b126b9 100644 --- a/src/names.rs +++ b/src/names.rs @@ -88,21 +88,25 @@ impl schemars::JsonSchema for PackageName { impl PackageName { /// Returns the parts of the package name + #[must_use] pub fn as_str(&self) -> (&str, &str) { (&self.0, &self.1) } /// Returns the package name as a string suitable for use in the filesystem + #[must_use] pub fn escaped(&self) -> String { format!("{}+{}", self.0, self.1) } /// Returns the scope of the package name + #[must_use] pub fn scope(&self) -> &str { &self.0 } /// Returns the name of the package name + #[must_use] pub fn name(&self) -> &str { &self.1 } @@ -123,6 +127,7 @@ ser_display_deser_fromstr!(PackageNames); impl PackageNames { /// Returns the parts of the package name + #[must_use] pub fn as_str(&self) -> (&str, &str) { match self { PackageNames::Pesde(name) => name.as_str(), @@ -132,6 +137,7 @@ impl PackageNames { } /// Returns the package name as a string suitable for use in the filesystem + #[must_use] pub fn escaped(&self) -> String { match self { PackageNames::Pesde(name) => name.escaped(), @@ -146,6 +152,7 @@ impl PackageNames { } /// Returns the scope of the package name + #[must_use] pub fn scope(&self) -> &str { match self { PackageNames::Pesde(name) => name.scope(), @@ -155,6 +162,7 @@ impl PackageNames { } /// Returns the name of the package name + #[must_use] pub fn name(&self) -> &str { match self { PackageNames::Pesde(name) => name.name(), @@ -181,7 +189,7 @@ impl FromStr for PackageNames { #[cfg(feature = "wally-compat")] if let Some(wally_name) = s .strip_prefix("wally#") - .or_else(|| if s.contains('-') { Some(s) } else { None }) + .or_else(|| s.contains('-').then_some(s)) .and_then(|s| wally::WallyPackageName::from_str(s).ok()) { return Ok(PackageNames::Wally(wally_name)); @@ -259,21 +267,25 @@ pub mod wally { impl WallyPackageName { /// Returns the parts of the package name + #[must_use] pub fn as_str(&self) -> (&str, &str) { (&self.0, &self.1) } /// Returns the package name as a string suitable for use in the filesystem + #[must_use] pub fn escaped(&self) -> String { format!("wally#{}+{}", self.0, self.1) } /// Returns the scope of the package name + #[must_use] pub fn scope(&self) -> &str { &self.0 } /// Returns the name of the package name + #[must_use] pub fn name(&self) -> &str { &self.1 } @@ -316,6 +328,7 @@ pub mod errors { /// Errors that can occur when working with Wally package names #[cfg(feature = "wally-compat")] + #[allow(clippy::enum_variant_names)] #[derive(Debug, Error)] pub enum WallyPackageNameError { /// The package name is not in the format `scope/name` diff --git a/src/patches.rs b/src/patches.rs index 6144a9f..71f3d89 100644 --- a/src/patches.rs +++ b/src/patches.rs @@ -1,10 +1,10 @@ use crate::{ - reporters::{PatchProgressReporter, PatchesReporter}, + reporters::{PatchProgressReporter as _, PatchesReporter}, source::ids::PackageId, MANIFEST_FILE_NAME, }; use fs_err::tokio as fs; -use futures::TryFutureExt; +use futures::TryFutureExt as _; use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature}; use std::{ path::{Path, PathBuf}, @@ -37,7 +37,7 @@ pub fn setup_patches_repo>(dir: P) -> Result( let chunk = match chunk { Ok(chunk) => chunk, Err(err) => { - yield Err(std::io::Error::new(std::io::ErrorKind::Other, err)); + yield Err(std::io::Error::other(err)); continue; } }; diff --git a/src/resolver.rs b/src/resolver.rs index 79f3643..4de51ae 100644 --- a/src/resolver.rs +++ b/src/resolver.rs @@ -5,13 
+5,13 @@ use crate::{ ids::PackageId, pesde::PesdePackageSource, specifiers::DependencySpecifiers, - traits::{PackageRef, PackageSource, RefreshOptions, ResolveOptions}, + traits::{PackageRef as _, PackageSource as _, RefreshOptions, ResolveOptions}, PackageSources, }, Project, RefreshedSources, }; use std::collections::{btree_map::Entry, HashMap, VecDeque}; -use tracing::{instrument, Instrument}; +use tracing::{instrument, Instrument as _}; fn insert_node( graph: &mut DependencyGraph, @@ -183,7 +183,7 @@ impl Project { .indices .get(&specifier.index) .ok_or_else(|| errors::DependencyGraphError::IndexNotFound( - specifier.index.to_string(), + specifier.index.clone(), ))? .clone() } else { @@ -203,7 +203,7 @@ impl Project { .wally_indices .get(&specifier.index) .ok_or_else(|| errors::DependencyGraphError::WallyIndexNotFound( - specifier.index.to_string(), + specifier.index.clone(), ))? .clone() } else { @@ -297,11 +297,7 @@ impl Project { } let node = DependencyGraphNode { - direct: if depth == 0 { - Some((alias.clone(), specifier.clone(), ty)) - } else { - None - }, + direct: (depth == 0).then(|| (alias.clone(), specifier.clone(), ty)), pkg_ref: pkg_ref.clone(), dependencies: Default::default(), resolved_ty, diff --git a/src/scripts.rs b/src/scripts.rs index 01f4027..29337cb 100644 --- a/src/scripts.rs +++ b/src/scripts.rs @@ -1,5 +1,5 @@ use crate::Project; -use futures::FutureExt; +use futures::FutureExt as _; use std::{ ffi::OsStr, fmt::{Debug, Display, Formatter}, @@ -7,7 +7,7 @@ use std::{ process::Stdio, }; use tokio::{ - io::{AsyncBufReadExt, BufReader}, + io::{AsyncBufReadExt as _, BufReader}, process::Command, }; use tracing::instrument; diff --git a/src/source/fs.rs b/src/source/fs.rs index 70ba16a..fb31304 100644 --- a/src/source/fs.rs +++ b/src/source/fs.rs @@ -5,7 +5,7 @@ use crate::{ use fs_err::tokio as fs; use relative_path::RelativePathBuf; use serde::{Deserialize, Serialize}; -use sha2::{Digest, Sha256}; +use sha2::{Digest as _, Sha256}; use std::{ collections::BTreeMap, fmt::Debug, @@ -13,7 +13,7 @@ use std::{ }; use tempfile::Builder; use tokio::{ - io::{AsyncReadExt, AsyncWriteExt}, + io::{AsyncReadExt as _, AsyncWriteExt as _}, pin, task::JoinSet, }; @@ -112,7 +112,7 @@ pub(crate) async fn store_in_cas } Err(e) if e.error.kind() == std::io::ErrorKind::AlreadyExists => {} Err(e) => return Err(e.error), - }; + } Ok(hash) } diff --git a/src/source/git/mod.rs b/src/source/git/mod.rs index 29d6713..9b92dc8 100644 --- a/src/source/git/mod.rs +++ b/src/source/git/mod.rs @@ -8,7 +8,9 @@ use crate::{ git::{pkg_ref::GitPackageRef, specifier::GitDependencySpecifier}, git_index::{read_file, GitBasedSource}, specifiers::DependencySpecifiers, - traits::{DownloadOptions, GetTargetOptions, PackageRef, RefreshOptions, ResolveOptions}, + traits::{ + DownloadOptions, GetTargetOptions, PackageRef as _, RefreshOptions, ResolveOptions, + }, PackageSource, ResolveResult, VersionId, ADDITIONAL_FORBIDDEN_FILES, IGNORED_DIRS, IGNORED_FILES, }, @@ -48,6 +50,7 @@ impl GitBasedSource for GitPackageSource { impl GitPackageSource { /// Creates a new Git package source + #[must_use] pub fn new(repo_url: Url) -> Self { Self { repo_url } } @@ -59,7 +62,7 @@ impl GitPackageSource { fn transform_pesde_dependencies( manifest: &Manifest, - repo_url: Url, + repo_url: &Url, rev: &str, root_tree: &gix::Tree, ) -> Result, errors::ResolveError> { @@ -77,7 +80,7 @@ fn transform_pesde_dependencies( .get(&specifier.index) .ok_or_else(|| { errors::ResolveError::PesdeIndexNotFound( - specifier.index.to_string(), + 
specifier.index.clone(), Box::new(repo_url.clone()), ) })? @@ -90,7 +93,7 @@ fn transform_pesde_dependencies( .get(&specifier.index) .ok_or_else(|| { errors::ResolveError::WallyIndexNotFound( - specifier.index.to_string(), + specifier.index.clone(), Box::new(repo_url.clone()), ) })? @@ -138,10 +141,10 @@ fn transform_pesde_dependencies( repo: repo_url.clone(), rev: rev.to_string(), path: Some(path), - }) + }); } DependencySpecifiers::Path(_) => { - return Err(errors::ResolveError::Path(Box::new(repo_url.clone()))) + return Err(errors::ResolveError::Path(Box::new(repo_url.clone()))); } } @@ -293,12 +296,8 @@ impl PackageSource for GitPackageSource { return Err(errors::ResolveError::NoManifest(Box::new(repo_url.clone()))); }; - let dependencies = transform_pesde_dependencies( - &manifest, - repo_url.clone(), - &specifier.rev, - &root_tree, - )?; + let dependencies = + transform_pesde_dependencies(&manifest, &repo_url, &specifier.rev, &root_tree)?; Ok(( PackageNames::Pesde(manifest.name), diff --git a/src/source/git_index.rs b/src/source/git_index.rs index 861367f..de1f914 100644 --- a/src/source/git_index.rs +++ b/src/source/git_index.rs @@ -138,9 +138,8 @@ pub fn root_tree(repo: &gix::Repository) -> Result } }; - let refspec = match remote.refspecs(Direction::Fetch).first() { - Some(head) => head, - None => return Err(errors::TreeError::NoRefSpecs(path)), + let Some(refspec) = remote.refspecs(Direction::Fetch).first() else { + return Err(errors::TreeError::NoRefSpecs(path)); }; let spec_ref = refspec.to_ref(); @@ -153,7 +152,7 @@ pub fn root_tree(repo: &gix::Repository) -> Result let reference = match repo.find_reference(&local_ref) { Ok(reference) => reference, - Err(e) => return Err(errors::TreeError::NoReference(local_ref.to_string(), e)), + Err(e) => return Err(errors::TreeError::NoReference(local_ref.clone(), e)), }; let reference_name = reference.name().as_bstr().to_string(); diff --git a/src/source/ids.rs b/src/source/ids.rs index f3b37be..b7ef2dd 100644 --- a/src/source/ids.rs +++ b/src/source/ids.rs @@ -9,21 +9,25 @@ ser_display_deser_fromstr!(VersionId); impl VersionId { /// Creates a new version ID + #[must_use] pub fn new(version: Version, target: TargetKind) -> Self { VersionId(version, target) } /// Access the version + #[must_use] pub fn version(&self) -> &Version { &self.0 } /// Access the target + #[must_use] pub fn target(&self) -> TargetKind { self.1 } /// Returns this version ID as a string that can be used in the filesystem + #[must_use] pub fn escaped(&self) -> String { format!("{}+{}", self.0, self.1) } @@ -34,6 +38,7 @@ impl VersionId { } /// Access the parts of the version ID + #[must_use] pub fn parts(&self) -> (&Version, TargetKind) { (&self.0, self.1) } @@ -96,21 +101,25 @@ ser_display_deser_fromstr!(PackageId); impl PackageId { /// Creates a new package ID + #[must_use] pub fn new(names: PackageNames, version_id: VersionId) -> Self { PackageId(names, version_id) } /// Access the name + #[must_use] pub fn name(&self) -> &PackageNames { &self.0 } /// Access the version ID + #[must_use] pub fn version_id(&self) -> &VersionId { &self.1 } /// Access the parts of the package ID + #[must_use] pub fn parts(&self) -> (&PackageNames, &VersionId) { (&self.0, &self.1) } diff --git a/src/source/path/mod.rs b/src/source/path/mod.rs index 07f64d5..cc92b80 100644 --- a/src/source/path/mod.rs +++ b/src/source/path/mod.rs @@ -53,7 +53,7 @@ impl PackageSource for PathPackageSource { .get(&spec.index) .ok_or_else(|| { errors::ResolveError::IndexNotFound( - spec.index.to_string(), + 
spec.index.clone(), specifier.path.clone(), ) })? @@ -66,7 +66,7 @@ impl PackageSource for PathPackageSource { .get(&spec.index) .ok_or_else(|| { errors::ResolveError::IndexNotFound( - spec.index.to_string(), + spec.index.clone(), specifier.path.clone(), ) })? diff --git a/src/source/pesde/mod.rs b/src/source/pesde/mod.rs index 448f5eb..85e8ef2 100644 --- a/src/source/pesde/mod.rs +++ b/src/source/pesde/mod.rs @@ -27,7 +27,7 @@ use crate::{ version_matches, Project, }; use fs_err::tokio as fs; -use futures::StreamExt; +use futures::StreamExt as _; use semver::VersionReq; use tokio::{pin, task::spawn_blocking}; use tracing::instrument; @@ -68,6 +68,7 @@ impl GitBasedSource for PesdePackageSource { impl PesdePackageSource { /// Creates a new pesde package source + #[must_use] pub fn new(repo_url: Url) -> Self { Self { repo_url } } @@ -353,11 +354,13 @@ impl AllowedRegistries { } /// Whether the given URL is allowed + #[must_use] pub fn is_allowed(&self, url: Url) -> bool { self._is_allowed(&simplify_url(url)) } /// Whether the given URL is allowed, or is the same as the given URL + #[must_use] pub fn is_allowed_or_same(&self, this: Url, external: Url) -> bool { let this = simplify_url(this); let external = simplify_url(external); @@ -394,11 +397,13 @@ pub struct IndexConfig { impl IndexConfig { /// The URL of the API + #[must_use] pub fn api(&self) -> &str { self.api.as_str().trim_end_matches('/') } /// The URL to download packages from + #[must_use] pub fn download(&self) -> String { self.download .as_deref() diff --git a/src/source/refs.rs b/src/source/refs.rs index 5c18cda..6c410c2 100644 --- a/src/source/refs.rs +++ b/src/source/refs.rs @@ -24,6 +24,7 @@ pub enum PackageRefs { impl PackageRefs { /// Returns whether this package reference should be treated as a Wally package + #[must_use] pub fn is_wally_package(&self) -> bool { match self { #[cfg(feature = "wally-compat")] diff --git a/src/source/wally/mod.rs b/src/source/wally/mod.rs index 81a3b8d..2eb65dd 100644 --- a/src/source/wally/mod.rs +++ b/src/source/wally/mod.rs @@ -25,8 +25,8 @@ use relative_path::RelativePathBuf; use reqwest::header::AUTHORIZATION; use serde::Deserialize; use std::{collections::BTreeMap, path::PathBuf}; -use tokio::{io::AsyncReadExt, pin, task::spawn_blocking}; -use tokio_util::compat::FuturesAsyncReadCompatExt; +use tokio::{io::AsyncReadExt as _, pin, task::spawn_blocking}; +use tokio_util::compat::FuturesAsyncReadCompatExt as _; use tracing::instrument; pub(crate) mod compat_util; @@ -57,6 +57,7 @@ impl GitBasedSource for WallyPackageSource { impl WallyPackageSource { /// Creates a new Wally package source + #[must_use] pub fn new(repo_url: Url) -> Self { Self { repo_url } } @@ -168,12 +169,11 @@ impl PackageSource for WallyPackageSource { } Ok(None) => { tracing::debug!("{} not found in {}", specifier.name, source.repo_url); - continue; } Err(e) => return Err(e), } } - }; + } let Some(string) = string else { return Err(errors::ResolveError::NotFound(specifier.name.to_string())); @@ -202,7 +202,7 @@ impl PackageSource for WallyPackageSource { manifest.package.version, match manifest.package.realm { Realm::Server => TargetKind::RobloxServer, - _ => TargetKind::Roblox, + Realm::Shared => TargetKind::Roblox, }, ), WallyPackageRef { @@ -247,7 +247,7 @@ impl PackageSource for WallyPackageSource { } Err(e) if e.kind() == std::io::ErrorKind::NotFound => {} Err(e) => return Err(errors::DownloadError::ReadIndex(e)), - }; + } let (scope, name) = id.name().as_str(); diff --git a/src/source/workspace/mod.rs 
b/src/source/workspace/mod.rs index 542284a..c96f54a 100644 --- a/src/source/workspace/mod.rs +++ b/src/source/workspace/mod.rs @@ -12,7 +12,7 @@ use crate::{ ResolveResult, }, }; -use futures::StreamExt; +use futures::StreamExt as _; use relative_path::RelativePathBuf; use std::collections::BTreeMap; use tokio::pin; @@ -86,7 +86,7 @@ impl PackageSource for WorkspacePackageSource { .get(&spec.index) .ok_or_else(|| { errors::ResolveError::IndexNotFound( - spec.index.to_string(), + spec.index.clone(), manifest.name.to_string(), ) })? @@ -99,7 +99,7 @@ impl PackageSource for WorkspacePackageSource { .get(&spec.index) .ok_or_else(|| { errors::ResolveError::IndexNotFound( - spec.index.to_string(), + spec.index.clone(), manifest.name.to_string(), ) })? diff --git a/src/util.rs b/src/util.rs index eb83e4e..bca86e3 100644 --- a/src/util.rs +++ b/src/util.rs @@ -6,7 +6,7 @@ use serde::{ de::{MapAccess, Visitor}, Deserialize, Deserializer, Serializer, }; -use sha2::{Digest, Sha256}; +use sha2::{Digest as _, Sha256}; use std::{ collections::{BTreeMap, HashSet}, fmt::{Display, Formatter}, @@ -29,8 +29,8 @@ pub fn authenticate_conn( next: gix::credentials::helper::NextAction::from(ctx), })) } - gix::credentials::helper::Action::Store(_) => Ok(None), - gix::credentials::helper::Action::Erase(_) => Ok(None), + gix::credentials::helper::Action::Store(_) + | gix::credentials::helper::Action::Erase(_) => Ok(None), }); } } @@ -98,6 +98,7 @@ pub fn is_default(t: &T) -> bool { t == &T::default() } +#[must_use] pub fn no_build_metadata(version: &Version) -> Version { let mut version = version.clone(); version.build = semver::BuildMetadata::EMPTY; @@ -163,21 +164,21 @@ where formatter.write_str("a map with no duplicate keys") } - fn visit_map(self, mut access: A) -> Result + fn visit_map(self, mut map: A) -> Result where A: MapAccess<'de>, { - let mut map = self.map; + let mut res = self.map; - while let Some((key, value)) = access.next_entry()? { - if map.contains_key(&key) { + while let Some((key, value)) = map.next_entry()? { + if res.contains_key(&key) { return Err(serde::de::Error::custom(format!("duplicate key `{key}`"))); } - map.insert(key, value); + res.insert(key, value); } - Ok(map) + Ok(res) } }
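
Not part of the diff above: a minimal, self-contained sketch (all names hypothetical) of the builder-method pattern that the `#[must_use]` annotations added above (e.g. in src/download_and_link.rs) are meant to protect. Without the attribute, calling a chained setter and dropping its return value silently discards the configuration; with it, the compiler warns.

    // Hypothetical builder mirroring the shape of the annotated methods above.
    #[derive(Debug, Default)]
    struct Options {
        force: bool,
        network_concurrency: usize,
    }

    impl Options {
        // Dropping the returned value (e.g. `Options::default().force(true);`)
        // now produces an unused-result warning instead of passing silently.
        #[must_use]
        fn force(mut self, force: bool) -> Self {
            self.force = force;
            self
        }

        #[must_use]
        fn network_concurrency(mut self, network_concurrency: usize) -> Self {
            self.network_concurrency = network_concurrency;
            self
        }
    }

    fn main() {
        // Chaining consumes every intermediate value, so nothing is discarded.
        let opts = Options::default().force(true).network_concurrency(16);
        println!("{opts:?}");
    }

This is a sketch of the pattern only, not the crate's actual API; the real option types and setters are the ones shown in the hunks above.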