feat: use globs in includes field

daimond113 2024-11-23 22:54:28 +01:00
parent dad3fad402
commit 37a7c34084
No known key found for this signature in database
GPG key ID: 3A8ECE51328B513C
8 changed files with 189 additions and 145 deletions

CHANGELOG.md

@@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 ### Added
 - Print that no updates are available in `outdated` command by @daimond113
 - Support negated globs in `workspace_members` field by @daimond113
+- Make `includes` use glob patterns by @daimond113

 ## [0.5.0-rc.12] - 2024-11-22
 ### Added
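Since `includes` entries are now interpreted as glob patterns rather than literal paths, the selection semantics come from the `globset` crate introduced below. A minimal sketch of how positive and negated patterns interact (the patterns are illustrative, not from a real manifest; the leading `!` is stripped by pesde before compiling, as in `matching_globs` further down):

```rust
use globset::{Glob, GlobSetBuilder};

fn main() -> Result<(), globset::Error> {
    // Positive patterns select files...
    let include = GlobSetBuilder::new()
        .add(Glob::new("assets/**")?)
        .build()?;
    // ...negated ones (leading `!`, stripped before compiling) subtract from them.
    let exclude = GlobSetBuilder::new()
        .add(Glob::new("assets/**/*.psd")?)
        .build()?;

    for path in ["assets/logo.png", "assets/raw/logo.psd"] {
        let kept = include.is_match(path) && !exclude.is_match(path);
        println!("{path}: included = {kept}");
    }
    Ok(())
}
```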

Cargo.lock generated

@@ -2376,10 +2376,18 @@ dependencies = [
 ]

 [[package]]
-name = "glob"
-version = "0.3.1"
+name = "globset"
+version = "0.4.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
+checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19"
+dependencies = [
+ "aho-corasick",
+ "bstr",
+ "log",
+ "regex-automata",
+ "regex-syntax",
+ "serde",
+]

 [[package]]
 name = "governor"

@@ -3654,7 +3662,7 @@ dependencies = [
 "futures",
 "git2",
 "gix",
-"glob",
+"globset",
 "indicatif",
 "indicatif-log-bridge",
 "inquire",

Cargo.toml

@@ -65,7 +65,7 @@ url = { version = "2.5.3", features = ["serde"] }
 chrono = { version = "0.4.38", features = ["serde"] }
 sha2 = "0.10.8"
 tempfile = "3.14.0"
-glob = "0.3.1"
+globset = { version = "0.4.15", features = ["serde1"] }
 fs-err = { version = "3.0.0", features = ["tokio"] }

 # TODO: remove this when gitoxide adds support for: committing, pushing, adding
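The `serde1` feature is what allows `globset::Glob` to sit directly in the serde-derived manifest structs changed later in this commit: a `Glob` deserializes from a plain string and validates the pattern in the process. A minimal sketch under that assumption (the struct and sample manifest are invented for illustration; `toml` and `anyhow` are already in this crate's dependency graph):

```rust
use serde::Deserialize;

// Hypothetical trimmed-down mirror of the manifest fields this commit changes.
#[derive(Deserialize)]
struct ManifestSlice {
    #[serde(default)]
    includes: Vec<globset::Glob>,
    #[serde(default)]
    workspace_members: Vec<globset::Glob>,
}

fn main() -> anyhow::Result<()> {
    // Invalid patterns fail here, at deserialization time, instead of later.
    let m: ManifestSlice = toml::from_str(
        r#"
includes = ["src/**", "README.md", "!src/tests/**"]
workspace_members = ["crates/*"]
"#,
    )?;
    assert_eq!(m.includes.len(), 3);
    Ok(())
}
```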

src/cli/commands/publish.rs

@@ -6,6 +6,7 @@ use colored::Colorize;
 use fs_err::tokio as fs;
 use pesde::{
     manifest::{target::Target, DependencyType},
+    matching_globs,
     scripts::ScriptName,
     source::{
         pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
@ -21,7 +22,7 @@ use pesde::{
}; };
use reqwest::{header::AUTHORIZATION, StatusCode}; use reqwest::{header::AUTHORIZATION, StatusCode};
use semver::VersionReq; use semver::VersionReq;
use std::{collections::HashSet, path::Component}; use std::{collections::HashSet, path::PathBuf};
use tempfile::Builder; use tempfile::Builder;
use tokio::io::{AsyncSeekExt, AsyncWriteExt}; use tokio::io::{AsyncSeekExt, AsyncWriteExt};
@@ -41,7 +42,12 @@ pub struct PublishCommand {
 }

 impl PublishCommand {
-    async fn run_impl(self, project: &Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
+    async fn run_impl(
+        self,
+        project: &Project,
+        reqwest: reqwest::Client,
+        is_root: bool,
+    ) -> anyhow::Result<()> {
         let mut manifest = project
             .deser_manifest()
             .await
@@ -55,7 +61,9 @@ impl PublishCommand {
         );

         if manifest.private {
-            println!("{}", "package is private, cannot publish".red().bold());
+            if !is_root {
+                println!("{}", "package is private, cannot publish".red().bold());
+            }
             return Ok(());
         }

@@ -97,7 +105,6 @@ impl PublishCommand {
             async_compression::tokio::write::GzipEncoder::with_quality(vec![], Level::Best),
         );

-        let mut display_includes: Vec<String> = vec![MANIFEST_FILE_NAME.to_string()];
         let mut display_build_files: Vec<String> = vec![];

         let (lib_path, bin_path, target_kind) = (
@@ -112,64 +119,75 @@ impl PublishCommand {
             _ => None,
         };

-        if manifest.includes.insert(MANIFEST_FILE_NAME.to_string()) {
+        let mut paths = matching_globs(project.package_dir(), manifest.includes.clone(), true)
+            .await
+            .context("failed to get included files")?;
+
+        if paths.insert(PathBuf::from(MANIFEST_FILE_NAME)) {
             println!(
-                "{}: {MANIFEST_FILE_NAME} was not in includes, adding it",
+                "{}: {MANIFEST_FILE_NAME} was not included, adding it",
                 "warn".yellow().bold()
             );
         }

-        if manifest.includes.remove(".git") {
-            println!(
-                "{}: .git was in includes, removing it",
-                "warn".yellow().bold()
-            );
+        if paths.iter().any(|p| p.starts_with(".git")) {
+            anyhow::bail!("git directory was included, please remove it");
         }

-        if !manifest.includes.iter().any(|f| {
+        if !paths.iter().any(|f| {
             matches!(
-                f.to_lowercase().as_str(),
+                f.to_str().unwrap().to_lowercase().as_str(),
                 "readme" | "readme.md" | "readme.txt"
             )
         }) {
             println!(
-                "{}: no README file in includes, consider adding one",
+                "{}: no README file included, consider adding one",
                 "warn".yellow().bold()
             );
         }

-        if !manifest.includes.iter().any(|f| f == "docs") {
+        if !paths.iter().any(|p| p.starts_with("docs")) {
             println!(
-                "{}: no docs directory in includes, consider adding one",
+                "{}: docs directory not included, consider adding one",
                 "warn".yellow().bold()
             );
         }

-        if manifest.includes.remove("default.project.json") {
-            println!(
-                "{}: default.project.json was in includes, this should be generated by the {} script upon dependants installation",
-                "warn".yellow().bold(),
-                ScriptName::RobloxSyncConfigGenerator
-            );
+        for path in &paths {
+            if path
+                .file_name()
+                .is_some_and(|n| n == "default.project.json")
+            {
+                anyhow::bail!(
+                    "default.project.json was included at `{}`, this should be generated by the {} script upon dependants installation",
+                    path.display(),
+                    ScriptName::RobloxSyncConfigGenerator
+                );
+            }
         }

         for ignored_path in IGNORED_FILES.iter().chain(IGNORED_DIRS.iter()) {
-            if manifest.includes.remove(*ignored_path) {
-                println!(
-                    r#"{}: {ignored_path} was in includes, removing it.
-{}: if this was a toolchain manager's manifest file, do not include it due to it possibly messing with user scripts
-{}: otherwise, the file was deemed unnecessary, if you don't understand why, please contact the maintainers"#,
-                    "warn".yellow().bold(),
-                    "info".blue().bold(),
-                    "info".blue().bold()
+            if paths.iter().any(|p| {
+                p.components()
+                    .any(|ct| ct == std::path::Component::Normal(ignored_path.as_ref()))
+            }) {
+                anyhow::bail!(
+                    r#"forbidden file {ignored_path} was included.
+info: if this was a toolchain manager's manifest file, do not include it due to it possibly messing with user scripts
+info: otherwise, the file was deemed unnecessary, if you don't understand why, please contact the maintainers"#,
                 );
             }
         }

         for (name, path) in [("lib path", lib_path), ("bin path", bin_path)] {
-            let Some(export_path) = path else { continue };
+            let Some(relative_export_path) = path else {
+                continue;
+            };

-            let export_path = export_path.to_path(project.package_dir());
+            let export_path = relative_export_path
+                .to_path(project.package_dir())
+                .canonicalize()
+                .context(format!("failed to canonicalize {name}"))?;
             if !export_path.exists() {
                 anyhow::bail!("{name} points to non-existent file");
             }
@@ -191,71 +209,33 @@ impl PublishCommand {
                 anyhow::bail!("{name} is not a valid Luau file: {err}");
             }

-            let first_part = export_path
-                .strip_prefix(project.package_dir())
-                .context(format!("{name} not within project directory"))?
+            let first_part = relative_export_path
                 .components()
                 .next()
                 .context(format!("{name} must contain at least one part"))?;

             let first_part = match first_part {
-                Component::Normal(part) => part,
+                relative_path::Component::Normal(part) => part,
                 _ => anyhow::bail!("{name} must be within project directory"),
             };

-            let first_part_str = first_part.to_string_lossy();
-
-            if manifest.includes.insert(first_part_str.to_string()) {
+            if paths.insert(PathBuf::from(relative_export_path.as_str())) {
                 println!(
-                    "{}: {name} was not in includes, adding {first_part_str}",
+                    "{}: {name} was not included, adding {relative_export_path}",
                     "warn".yellow().bold()
                 );
             }

             if roblox_target.as_mut().map_or(false, |build_files| {
-                build_files.insert(first_part_str.to_string())
+                build_files.insert(first_part.to_string())
             }) {
                 println!(
-                    "{}: {name} was not in build files, adding {first_part_str}",
+                    "{}: {name} was not in build files, adding {first_part}",
                     "warn".yellow().bold()
                 );
             }
         }

-        for included_name in &manifest.includes {
-            let included_path = project.package_dir().join(included_name);
-
-            if !included_path.exists() {
-                anyhow::bail!("included file {included_name} does not exist");
-            }
-
-            // it's already included, and guaranteed to be a file
-            if included_name.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
-                continue;
-            }
-
-            if included_path.is_file() {
-                display_includes.push(included_name.clone());
-
-                archive
-                    .append_file(
-                        included_name,
-                        fs::File::open(&included_path)
-                            .await
-                            .context(format!("failed to read {included_name}"))?
-                            .file_mut(),
-                    )
-                    .await?;
-            } else {
-                display_includes.push(format!("{included_name}/*"));
-
-                archive
-                    .append_dir_all(included_name, &included_path)
-                    .await
-                    .context(format!("failed to include directory {included_name}"))?;
-            }
-        }
-
         if let Some(build_files) = &roblox_target {
             for build_file in build_files.iter() {
                 if build_file.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
@@ -273,8 +253,8 @@ impl PublishCommand {
                     anyhow::bail!("build file {build_file} does not exist");
                 }

-                if !manifest.includes.contains(build_file) {
-                    anyhow::bail!("build file {build_file} is not in includes, please add it");
+                if !paths.iter().any(|p| p.starts_with(build_file)) {
+                    anyhow::bail!("build file {build_file} is not included, please add it");
                 }

                 if build_file_path.is_file() {
@@ -285,6 +265,42 @@ impl PublishCommand {
             }
         }

+        for relative_path in &paths {
+            let path = project.package_dir().join(relative_path);
+
+            if !path.exists() {
+                anyhow::bail!("included file `{}` does not exist", path.display());
+            }
+
+            let file_name = relative_path
+                .file_name()
+                .context("failed to get file name")?
+                .to_string_lossy()
+                .to_string();
+
+            // it'll be included later after transformations, and is guaranteed to be a file
+            if file_name.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
+                continue;
+            }
+
+            if path.is_dir() {
+                archive.append_dir(file_name, &path).await.context(format!(
+                    "failed to include directory `{}`",
+                    relative_path.display()
+                ))?;
+            } else {
+                archive
+                    .append_file(
+                        &relative_path,
+                        fs::File::open(&path)
+                            .await
+                            .context(format!("failed to read `{}`", relative_path.display()))?
+                            .file_mut(),
+                    )
+                    .await?;
+            }
+        }
+
         #[cfg(feature = "wally-compat")]
         let mut has_wally = false;
         let mut has_git = false;
@@ -433,7 +449,11 @@ impl PublishCommand {

         println!(
             "includes: {}",
-            display_includes.into_iter().collect::<Vec<_>>().join(", ")
+            paths
+                .into_iter()
+                .map(|p| p.to_string_lossy().to_string())
+                .collect::<Vec<_>>()
+                .join(", ")
         );

         if !self.dry_run
@@ -569,7 +589,7 @@ impl PublishCommand {
     }

     pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
-        let result = self.clone().run_impl(&project, reqwest.clone()).await;
+        let result = self.clone().run_impl(&project, reqwest.clone(), true).await;
         if project.workspace_dir().is_some() {
             return result;
         } else {
@@ -579,7 +599,7 @@ impl PublishCommand {
         run_on_workspace_members(&project, |project| {
             let reqwest = reqwest.clone();
             let this = self.clone();
-            async move { this.run_impl(&project, reqwest).await }
+            async move { this.run_impl(&project, reqwest, false).await }
         })
         .await
         .map(|_| ())
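One detail worth noting about the rewritten checks: `Path::starts_with` compares whole path components, not string prefixes, which is what keeps the `.git` and build-file checks from tripping on lookalike names. A standalone illustration (not from the commit):

```rust
use std::path::Path;

fn main() {
    // Component-wise: ".gitignore" does not start with ".git"...
    assert!(!Path::new(".gitignore").starts_with(".git"));
    // ...but anything inside the .git directory does.
    assert!(Path::new(".git/config").starts_with(".git"));

    // Likewise "lib.luau" and "lib" are distinct components.
    assert!(!Path::new("lib.luau").starts_with("lib"));
    assert!(Path::new("lib/init.luau").starts_with("lib"));
}
```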

src/lib.rs

@@ -16,7 +16,6 @@ use std::{
     collections::{HashMap, HashSet},
     path::{Path, PathBuf},
 };
-use tokio::task::spawn_blocking;

 /// Downloading packages
 pub mod download;
@@ -193,29 +192,7 @@ impl Project {
                 errors::WorkspaceMembersError::ManifestDeser(dir.to_path_buf(), Box::new(e))
             })?;

-        let mut members = HashSet::new();
-
-        for glob in &manifest.workspace_members {
-            let is_removal = glob.starts_with('!');
-            let glob = if is_removal { &glob[1..] } else { glob };
-
-            let path = dir.join(glob);
-            let paths = spawn_blocking(move || {
-                glob::glob(&path.as_os_str().to_string_lossy())?
-                    .collect::<Result<Vec<_>, _>>()
-                    .map_err(errors::WorkspaceMembersError::Globbing)
-            })
-            .await
-            .unwrap()?;
-
-            if is_removal {
-                for path in paths {
-                    members.remove(&path);
-                }
-            } else {
-                members.extend(paths);
-            }
-        }
+        let members = matching_globs(dir, manifest.workspace_members, false).await?;

         Ok(stream! {
             for path in members {
@@ -232,6 +209,53 @@ impl Project {
     }
 }

+/// Gets all matching paths in a directory
+pub async fn matching_globs<P: AsRef<Path>>(
+    dir: P,
+    members: Vec<globset::Glob>,
+    relative: bool,
+) -> Result<HashSet<PathBuf>, errors::MatchingGlobsError> {
+    let mut positive_globset = globset::GlobSetBuilder::new();
+    let mut negative_globset = globset::GlobSetBuilder::new();
+
+    for pattern in members {
+        match pattern.glob().strip_prefix('!') {
+            Some(pattern) => negative_globset.add(globset::Glob::new(pattern)?),
+            None => positive_globset.add(pattern),
+        };
+    }
+
+    let positive_globset = positive_globset.build()?;
+    let negative_globset = negative_globset.build()?;
+
+    let mut read_dirs = vec![fs::read_dir(dir.as_ref().to_path_buf())];
+    let mut paths = HashSet::new();
+
+    while let Some(read_dir) = read_dirs.pop() {
+        let mut read_dir = read_dir.await?;
+
+        while let Some(entry) = read_dir.next_entry().await? {
+            let path = entry.path();
+            if entry.file_type().await?.is_dir() {
+                read_dirs.push(fs::read_dir(path));
+                continue;
+            }
+
+            let relative_path = path.strip_prefix(dir.as_ref()).unwrap();
+            if positive_globset.is_match(relative_path) && !negative_globset.is_match(relative_path)
+            {
+                paths.insert(if relative {
+                    relative_path.to_path_buf()
+                } else {
+                    path.to_path_buf()
+                });
+            }
+        }
+    }
+
+    Ok(paths)
+}
+
 /// Refreshes the sources asynchronously
 pub async fn refresh_sources<I: Iterator<Item = PackageSources>>(
     project: &Project,
@@ -312,12 +336,21 @@ pub mod errors {
         #[error("error interacting with the filesystem")]
         Io(#[from] std::io::Error),

-        /// An invalid glob pattern was found
-        #[error("invalid glob pattern")]
-        Glob(#[from] glob::PatternError),
+        /// An error occurred while globbing
+        #[error("error globbing")]
+        Globbing(#[from] MatchingGlobsError),
+    }
+
+    /// Errors that can occur when finding matching globs
+    #[derive(Debug, Error)]
+    #[non_exhaustive]
+    pub enum MatchingGlobsError {
+        /// An error occurred interacting with the filesystem
+        #[error("error interacting with the filesystem")]
+        Io(#[from] std::io::Error),

         /// An error occurred while globbing
         #[error("error globbing")]
-        Globbing(#[from] glob::GlobError),
+        Globbing(#[from] globset::Error),
     }
 }
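A minimal usage sketch of the new helper, assuming a tokio runtime; the directory and patterns are invented, and the `!` prefix is stripped by the helper itself, as the loop over `members` above shows:

```rust
use globset::Glob;
use pesde::matching_globs;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Everything under src/, minus fixtures; `relative = true` yields paths
    // relative to the directory, as the publish command needs.
    let paths = matching_globs(
        "path/to/package",
        vec![Glob::new("src/**")?, Glob::new("!src/fixtures/**")?],
        true,
    )
    .await?;

    for path in &paths {
        println!("{}", path.display());
    }
    Ok(())
}
```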

src/main.rs

@@ -6,7 +6,7 @@ use clap::Parser;
 use fs_err::tokio as fs;
 use indicatif::MultiProgress;
 use indicatif_log_bridge::LogWrapper;
-use pesde::{AuthConfig, Project, MANIFEST_FILE_NAME};
+use pesde::{matching_globs, AuthConfig, Project, MANIFEST_FILE_NAME};
 use std::{
     collections::HashSet,
     path::{Path, PathBuf},
@@ -133,17 +133,9 @@ async fn run() -> anyhow::Result<()> {
             return Ok(HashSet::new());
         }

-        manifest
-            .workspace_members
-            .iter()
-            .map(|member| path.join(member))
-            .map(|p| glob::glob(&p.to_string_lossy()))
-            .collect::<Result<Vec<_>, _>>()
-            .context("invalid glob patterns")?
-            .into_iter()
-            .flat_map(|paths| paths.into_iter())
-            .collect::<Result<HashSet<_>, _>>()
-            .context("failed to expand glob patterns")
+        matching_globs(path, manifest.workspace_members, false)
+            .await
+            .context("failed to get workspace members")
     }

     while let Some(path) = current_path {
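A behavioral nuance of this migration: the deleted loop applied `!` patterns in order, as set removals interleaved with additions, whereas `matching_globs` compiles positives and negatives into two separate matchers, so pattern order no longer matters. A small sketch of that two-set evaluation (a hypothetical mirror of the helper's logic, not the helper itself):

```rust
use globset::{Glob, GlobSet, GlobSetBuilder};

// A path survives iff some positive pattern matches and no negative one does,
// regardless of the order the patterns were written in.
fn kept(path: &str, positive: &GlobSet, negative: &GlobSet) -> bool {
    positive.is_match(path) && !negative.is_match(path)
}

fn main() -> Result<(), globset::Error> {
    let mut pos = GlobSetBuilder::new();
    let mut neg = GlobSetBuilder::new();
    pos.add(Glob::new("crates/*")?);
    // "!crates/internal-*" arrives here with its `!` already stripped.
    neg.add(Glob::new("crates/internal-*")?);
    let (pos, neg) = (pos.build()?, neg.build()?);

    assert!(kept("crates/cli", &pos, &neg));
    assert!(!kept("crates/internal-macros", &pos, &neg));
    Ok(())
}
```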

src/manifest/mod.rs

@@ -1,8 +1,7 @@
-use std::collections::{BTreeMap, BTreeSet};
-
 use relative_path::RelativePathBuf;
 use semver::Version;
 use serde::{Deserialize, Serialize};
+use std::collections::{BTreeMap, HashMap};

 use crate::{
     manifest::{overrides::OverrideKey, target::Target},
@@ -45,7 +44,7 @@
     /// The indices to use for the package
     #[serde(
         default,
-        serialize_with = "crate::util::serialize_gix_url_map",
+        skip_serializing,
         deserialize_with = "crate::util::deserialize_gix_url_map"
     )]
     pub indices: BTreeMap<String, gix::Url>,
@@ -53,8 +52,7 @@
     #[cfg(feature = "wally-compat")]
     #[serde(
         default,
-        skip_serializing_if = "BTreeMap::is_empty",
-        serialize_with = "crate::util::serialize_gix_url_map",
+        skip_serializing,
         deserialize_with = "crate::util::deserialize_gix_url_map"
     )]
     pub wally_indices: BTreeMap<String, gix::Url>,
@@ -63,7 +61,7 @@
     pub overrides: BTreeMap<OverrideKey, DependencySpecifiers>,
     /// The files to include in the package
     #[serde(default)]
-    pub includes: BTreeSet<String>,
+    pub includes: Vec<globset::Glob>,
     /// The patches to apply to packages
     #[cfg(feature = "patches")]
     #[serde(default, skip_serializing)]
@@ -76,7 +74,7 @@
     pub pesde_version: Option<Version>,
     /// A list of globs pointing to workspace members' directories
     #[serde(default, skip_serializing_if = "Vec::is_empty")]
-    pub workspace_members: Vec<String>,
+    pub workspace_members: Vec<globset::Glob>,
     /// The Roblox place of this project
     #[serde(default, skip_serializing)]
     pub place: BTreeMap<target::RobloxPlaceKind, String>,
@@ -90,6 +88,9 @@
     /// The dev dependencies of the package
     #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
     pub dev_dependencies: BTreeMap<String, DependencySpecifiers>,
+    /// The user-defined fields of the package
+    #[serde(flatten)]
+    pub user_defined_fields: HashMap<String, toml::Value>,
 }

 /// A dependency type
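The new `user_defined_fields` relies on serde's `#[serde(flatten)]`, which collects every manifest key not claimed by another field instead of rejecting it. A standalone sketch of the mechanism (the struct and keys here are illustrative):

```rust
use serde::Deserialize;
use std::collections::HashMap;

#[derive(Deserialize)]
struct Manifest {
    name: String,
    // Any key not matched by a named field lands here instead of erroring.
    #[serde(flatten)]
    user_defined_fields: HashMap<String, toml::Value>,
}

fn main() -> anyhow::Result<()> {
    let m: Manifest = toml::from_str(
        r#"
name = "acme/package"
my_custom_tool = { enabled = true }
"#,
    )?;
    assert!(m.user_defined_fields.contains_key("my_custom_tool"));
    Ok(())
}
```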

src/util.rs

@@ -1,6 +1,6 @@
 use crate::AuthConfig;
 use gix::bstr::BStr;
-use serde::{ser::SerializeMap, Deserialize, Deserializer, Serializer};
+use serde::{Deserialize, Deserializer, Serializer};
 use sha2::{Digest, Sha256};
 use std::collections::BTreeMap;

@@ -37,17 +37,6 @@
     gix::Url::from_bytes(BStr::new(&s)).map_err(serde::de::Error::custom)
 }

-pub fn serialize_gix_url_map<S: Serializer>(
-    url: &BTreeMap<String, gix::Url>,
-    serializer: S,
-) -> Result<S::Ok, S::Error> {
-    let mut map = serializer.serialize_map(Some(url.len()))?;
-    for (k, v) in url {
-        map.serialize_entry(k, &v.to_bstring().to_string())?;
-    }
-    map.end()
-}
-
 pub fn deserialize_gix_url_map<'de, D: Deserializer<'de>>(
     deserializer: D,
 ) -> Result<BTreeMap<String, gix::Url>, D::Error> {
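Deleting `serialize_gix_url_map` is safe because the index maps are now marked `skip_serializing`, which omits the field from output entirely, so a `serialize_with` function on it would never run. A minimal illustration of that serde behavior (hypothetical struct, not from the commit):

```rust
use serde::Serialize;

#[derive(Serialize)]
struct Example {
    // Omitted from output entirely; any `serialize_with` would be dead code.
    #[serde(skip_serializing)]
    internal: String,
    public: String,
}

fn main() -> anyhow::Result<()> {
    let out = toml::to_string(&Example {
        internal: "hidden".into(),
        public: "visible".into(),
    })?;
    assert_eq!(out.trim(), r#"public = "visible""#);
    Ok(())
}
```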