mirror of https://github.com/pesde-pkg/pesde.git
synced 2024-12-12 11:00:36 +00:00

feat: use globs in includes field

This commit is contained in:
parent dad3fad402
commit 37a7c34084

8 changed files with 189 additions and 145 deletions
CHANGELOG.md

@@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Added
 - Print that no updates are available in `outdated` command by @daimond113
 - Support negated globs in `workspace_members` field by @daimond113
+- Make `includes` use glob patterns by @daimond113

 ## [0.5.0-rc.12] - 2024-11-22
 ### Added
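Both glob-related changelog entries rest on the same semantics, implemented by the `matching_globs` helper added in src/lib.rs below: a path is kept when it matches any positive pattern and no negated (`!`-prefixed) one. A minimal standalone sketch of that rule, with illustrative patterns (only the `globset` crate is assumed):

// Sketch of the negation semantics used by `matching_globs` below:
// keep a path if it matches a positive pattern and no negated one.
use globset::{Glob, GlobSetBuilder};

fn main() -> Result<(), globset::Error> {
    let patterns = ["src/**", "!src/tests/**"];

    let mut positive = GlobSetBuilder::new();
    let mut negative = GlobSetBuilder::new();
    for pattern in patterns {
        match pattern.strip_prefix('!') {
            Some(p) => negative.add(Glob::new(p)?),
            None => positive.add(Glob::new(pattern)?),
        };
    }
    let (positive, negative) = (positive.build()?, negative.build()?);

    let keep = |path: &str| positive.is_match(path) && !negative.is_match(path);
    assert!(keep("src/init.luau"));
    assert!(!keep("src/tests/init.luau")); // excluded by the negated glob
    Ok(())
}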
16 Cargo.lock generated

@@ -2376,10 +2376,18 @@ dependencies = [
 ]

 [[package]]
-name = "glob"
-version = "0.3.1"
+name = "globset"
+version = "0.4.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
+checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19"
+dependencies = [
+ "aho-corasick",
+ "bstr",
+ "log",
+ "regex-automata",
+ "regex-syntax",
+ "serde",
+]

 [[package]]
 name = "governor"

@@ -3654,7 +3662,7 @@ dependencies = [
 "futures",
 "git2",
 "gix",
-"glob",
+"globset",
 "indicatif",
 "indicatif-log-bridge",
 "inquire",
Cargo.toml

@@ -65,7 +65,7 @@ url = { version = "2.5.3", features = ["serde"] }
 chrono = { version = "0.4.38", features = ["serde"] }
 sha2 = "0.10.8"
 tempfile = "3.14.0"
-glob = "0.3.1"
+globset = { version = "0.4.15", features = ["serde1"] }
 fs-err = { version = "3.0.0", features = ["tokio"] }

 # TODO: remove this when gitoxide adds support for: committing, pushing, adding
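The swap from `glob` to `globset` matters for two reasons: patterns are validated and pre-compiled into matchers, and the `serde1` feature enabled above lets `globset::Glob` live directly inside serde-deserialized manifest types (see src/manifest/mod.rs below). A minimal sketch of that API:

// `Glob::new` validates the pattern up front; `compile_matcher` produces
// a reusable matcher. Standalone example, only `globset` assumed.
use globset::Glob;

fn main() -> Result<(), globset::Error> {
    let glob = Glob::new("docs/**")?; // errors here on an invalid pattern
    let matcher = glob.compile_matcher();
    assert!(matcher.is_match("docs/intro.md"));
    assert!(!matcher.is_match("src/lib.rs"));
    Ok(())
}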
src/cli/commands/publish.rs

@@ -6,6 +6,7 @@ use colored::Colorize;
 use fs_err::tokio as fs;
 use pesde::{
     manifest::{target::Target, DependencyType},
+    matching_globs,
     scripts::ScriptName,
     source::{
         pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},

@@ -21,7 +22,7 @@ use pesde::{
 };
 use reqwest::{header::AUTHORIZATION, StatusCode};
 use semver::VersionReq;
-use std::{collections::HashSet, path::Component};
+use std::{collections::HashSet, path::PathBuf};
 use tempfile::Builder;
 use tokio::io::{AsyncSeekExt, AsyncWriteExt};

@@ -41,7 +42,12 @@ pub struct PublishCommand {
 }

 impl PublishCommand {
-    async fn run_impl(self, project: &Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
+    async fn run_impl(
+        self,
+        project: &Project,
+        reqwest: reqwest::Client,
+        is_root: bool,
+    ) -> anyhow::Result<()> {
         let mut manifest = project
             .deser_manifest()
             .await

@@ -55,7 +61,9 @@ impl PublishCommand {
         );

         if manifest.private {
-            println!("{}", "package is private, cannot publish".red().bold());
+            if !is_root {
+                println!("{}", "package is private, cannot publish".red().bold());
+            }

             return Ok(());
         }

@@ -97,7 +105,6 @@ impl PublishCommand {
             async_compression::tokio::write::GzipEncoder::with_quality(vec![], Level::Best),
         );

-        let mut display_includes: Vec<String> = vec![MANIFEST_FILE_NAME.to_string()];
         let mut display_build_files: Vec<String> = vec![];

         let (lib_path, bin_path, target_kind) = (

@@ -112,64 +119,75 @@ impl PublishCommand {
             _ => None,
         };

-        if manifest.includes.insert(MANIFEST_FILE_NAME.to_string()) {
+        let mut paths = matching_globs(project.package_dir(), manifest.includes.clone(), true)
+            .await
+            .context("failed to get included files")?;
+
+        if paths.insert(PathBuf::from(MANIFEST_FILE_NAME)) {
             println!(
-                "{}: {MANIFEST_FILE_NAME} was not in includes, adding it",
+                "{}: {MANIFEST_FILE_NAME} was not included, adding it",
                 "warn".yellow().bold()
             );
         }

-        if manifest.includes.remove(".git") {
-            println!(
-                "{}: .git was in includes, removing it",
-                "warn".yellow().bold()
-            );
+        if paths.iter().any(|p| p.starts_with(".git")) {
+            anyhow::bail!("git directory was included, please remove it");
         }

-        if !manifest.includes.iter().any(|f| {
+        if !paths.iter().any(|f| {
             matches!(
-                f.to_lowercase().as_str(),
+                f.to_str().unwrap().to_lowercase().as_str(),
                 "readme" | "readme.md" | "readme.txt"
             )
         }) {
             println!(
-                "{}: no README file in includes, consider adding one",
+                "{}: no README file included, consider adding one",
                 "warn".yellow().bold()
            );
         }

-        if !manifest.includes.iter().any(|f| f == "docs") {
+        if !paths.iter().any(|p| p.starts_with("docs")) {
             println!(
-                "{}: no docs directory in includes, consider adding one",
+                "{}: docs directory not included, consider adding one",
                 "warn".yellow().bold()
             );
         }

-        if manifest.includes.remove("default.project.json") {
-            println!(
-                "{}: default.project.json was in includes, this should be generated by the {} script upon dependants installation",
-                "warn".yellow().bold(),
-                ScriptName::RobloxSyncConfigGenerator
-            );
+        for path in &paths {
+            if path
+                .file_name()
+                .is_some_and(|n| n == "default.project.json")
+            {
+                anyhow::bail!(
+                    "default.project.json was included at `{}`, this should be generated by the {} script upon dependants installation",
+                    path.display(),
+                    ScriptName::RobloxSyncConfigGenerator
+                );
+            }
         }

         for ignored_path in IGNORED_FILES.iter().chain(IGNORED_DIRS.iter()) {
-            if manifest.includes.remove(*ignored_path) {
-                println!(
-                    r#"{}: {ignored_path} was in includes, removing it.
-{}: if this was a toolchain manager's manifest file, do not include it due to it possibly messing with user scripts
-{}: otherwise, the file was deemed unnecessary, if you don't understand why, please contact the maintainers"#,
-                    "warn".yellow().bold(),
-                    "info".blue().bold(),
-                    "info".blue().bold()
+            if paths.iter().any(|p| {
+                p.components()
+                    .any(|ct| ct == std::path::Component::Normal(ignored_path.as_ref()))
+            }) {
+                anyhow::bail!(
+                    r#"forbidden file {ignored_path} was included.
+info: if this was a toolchain manager's manifest file, do not include it due to it possibly messing with user scripts
+info: otherwise, the file was deemed unnecessary, if you don't understand why, please contact the maintainers"#,
                 );
             }
         }

         for (name, path) in [("lib path", lib_path), ("bin path", bin_path)] {
-            let Some(export_path) = path else { continue };
+            let Some(relative_export_path) = path else {
+                continue;
+            };

-            let export_path = export_path.to_path(project.package_dir());
+            let export_path = relative_export_path
+                .to_path(project.package_dir())
+                .canonicalize()
+                .context(format!("failed to canonicalize {name}"))?;
             if !export_path.exists() {
                 anyhow::bail!("{name} points to non-existent file");
             }

@@ -191,71 +209,33 @@ impl PublishCommand {
                 anyhow::bail!("{name} is not a valid Luau file: {err}");
             }

-            let first_part = export_path
-                .strip_prefix(project.package_dir())
-                .context(format!("{name} not within project directory"))?
+            let first_part = relative_export_path
                 .components()
                 .next()
                 .context(format!("{name} must contain at least one part"))?;

             let first_part = match first_part {
-                Component::Normal(part) => part,
+                relative_path::Component::Normal(part) => part,
                 _ => anyhow::bail!("{name} must be within project directory"),
             };

-            let first_part_str = first_part.to_string_lossy();
-
-            if manifest.includes.insert(first_part_str.to_string()) {
+            if paths.insert(PathBuf::from(relative_export_path.as_str())) {
                 println!(
-                    "{}: {name} was not in includes, adding {first_part_str}",
+                    "{}: {name} was not included, adding {relative_export_path}",
                     "warn".yellow().bold()
                 );
             }

             if roblox_target.as_mut().map_or(false, |build_files| {
-                build_files.insert(first_part_str.to_string())
+                build_files.insert(first_part.to_string())
             }) {
                 println!(
-                    "{}: {name} was not in build files, adding {first_part_str}",
+                    "{}: {name} was not in build files, adding {first_part}",
                     "warn".yellow().bold()
                 );
             }
         }

-        for included_name in &manifest.includes {
-            let included_path = project.package_dir().join(included_name);
-
-            if !included_path.exists() {
-                anyhow::bail!("included file {included_name} does not exist");
-            }
-
-            // it's already included, and guaranteed to be a file
-            if included_name.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
-                continue;
-            }
-
-            if included_path.is_file() {
-                display_includes.push(included_name.clone());
-
-                archive
-                    .append_file(
-                        included_name,
-                        fs::File::open(&included_path)
-                            .await
-                            .context(format!("failed to read {included_name}"))?
-                            .file_mut(),
-                    )
-                    .await?;
-            } else {
-                display_includes.push(format!("{included_name}/*"));
-
-                archive
-                    .append_dir_all(included_name, &included_path)
-                    .await
-                    .context(format!("failed to include directory {included_name}"))?;
-            }
-        }
-
         if let Some(build_files) = &roblox_target {
             for build_file in build_files.iter() {
                 if build_file.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {

@@ -273,8 +253,8 @@ impl PublishCommand {
                     anyhow::bail!("build file {build_file} does not exist");
                 }

-                if !manifest.includes.contains(build_file) {
-                    anyhow::bail!("build file {build_file} is not in includes, please add it");
+                if !paths.iter().any(|p| p.starts_with(build_file)) {
+                    anyhow::bail!("build file {build_file} is not included, please add it");
                 }

                 if build_file_path.is_file() {

@@ -285,6 +265,42 @@ impl PublishCommand {
             }
         }

+        for relative_path in &paths {
+            let path = project.package_dir().join(relative_path);
+
+            if !path.exists() {
+                anyhow::bail!("included file `{}` does not exist", path.display());
+            }
+
+            let file_name = relative_path
+                .file_name()
+                .context("failed to get file name")?
+                .to_string_lossy()
+                .to_string();
+
+            // it'll be included later after transformations, and is guaranteed to be a file
+            if file_name.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
+                continue;
+            }
+
+            if path.is_dir() {
+                archive.append_dir(file_name, &path).await.context(format!(
+                    "failed to include directory `{}`",
+                    relative_path.display()
+                ))?;
+            } else {
+                archive
+                    .append_file(
+                        &relative_path,
+                        fs::File::open(&path)
+                            .await
+                            .context(format!("failed to read `{}`", relative_path.display()))?
+                            .file_mut(),
+                    )
+                    .await?;
+            }
+        }
+
         #[cfg(feature = "wally-compat")]
         let mut has_wally = false;
         let mut has_git = false;

@@ -433,7 +449,11 @@ impl PublishCommand {

         println!(
             "includes: {}",
-            display_includes.into_iter().collect::<Vec<_>>().join(", ")
+            paths
+                .into_iter()
+                .map(|p| p.to_string_lossy().to_string())
+                .collect::<Vec<_>>()
+                .join(", ")
         );

         if !self.dry_run

@@ -569,7 +589,7 @@ impl PublishCommand {
     }

     pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
-        let result = self.clone().run_impl(&project, reqwest.clone()).await;
+        let result = self.clone().run_impl(&project, reqwest.clone(), true).await;
         if project.workspace_dir().is_some() {
             return result;
         } else {

@@ -579,7 +599,7 @@ impl PublishCommand {
         run_on_workspace_members(&project, |project| {
             let reqwest = reqwest.clone();
             let this = self.clone();
-            async move { this.run_impl(&project, reqwest).await }
+            async move { this.run_impl(&project, reqwest, false).await }
         })
         .await
         .map(|_| ())
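The publish flow above replaces the old warn-and-strip handling of forbidden files with a hard error based on path components, so nested occurrences (e.g. `sub/aftman.toml`) are caught too. A standalone sketch of that check; the ignored names here are illustrative stand-ins, not pesde's actual IGNORED_FILES/IGNORED_DIRS lists (which this diff does not show):

// A file is rejected if ANY component of its relative path equals an
// ignored name, mirroring the `p.components().any(...)` check above.
use std::path::{Component, Path};

fn contains_ignored(path: &Path, ignored: &[&str]) -> bool {
    ignored.iter().any(|name| {
        path.components()
            .any(|component| component == Component::Normal(name.as_ref()))
    })
}

fn main() {
    assert!(contains_ignored(Path::new("sub/aftman.toml"), &["aftman.toml"]));
    assert!(!contains_ignored(Path::new("src/init.luau"), &["aftman.toml"]));
}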
89 src/lib.rs

@@ -16,7 +16,6 @@ use std::{
     collections::{HashMap, HashSet},
     path::{Path, PathBuf},
 };
-use tokio::task::spawn_blocking;

 /// Downloading packages
 pub mod download;

@@ -193,29 +192,7 @@ impl Project {
             errors::WorkspaceMembersError::ManifestDeser(dir.to_path_buf(), Box::new(e))
         })?;

-        let mut members = HashSet::new();
-
-        for glob in &manifest.workspace_members {
-            let is_removal = glob.starts_with('!');
-            let glob = if is_removal { &glob[1..] } else { glob };
-
-            let path = dir.join(glob);
-            let paths = spawn_blocking(move || {
-                glob::glob(&path.as_os_str().to_string_lossy())?
-                    .collect::<Result<Vec<_>, _>>()
-                    .map_err(errors::WorkspaceMembersError::Globbing)
-            })
-            .await
-            .unwrap()?;
-
-            if is_removal {
-                for path in paths {
-                    members.remove(&path);
-                }
-            } else {
-                members.extend(paths);
-            }
-        }
+        let members = matching_globs(dir, manifest.workspace_members, false).await?;

         Ok(stream! {
             for path in members {

@@ -232,6 +209,53 @@ impl Project {
     }
 }

+/// Gets all matching paths in a directory
+pub async fn matching_globs<P: AsRef<Path>>(
+    dir: P,
+    members: Vec<globset::Glob>,
+    relative: bool,
+) -> Result<HashSet<PathBuf>, errors::MatchingGlobsError> {
+    let mut positive_globset = globset::GlobSetBuilder::new();
+    let mut negative_globset = globset::GlobSetBuilder::new();
+
+    for pattern in members {
+        match pattern.glob().strip_prefix('!') {
+            Some(pattern) => negative_globset.add(globset::Glob::new(pattern)?),
+            None => positive_globset.add(pattern),
+        };
+    }
+
+    let positive_globset = positive_globset.build()?;
+    let negative_globset = negative_globset.build()?;
+
+    let mut read_dirs = vec![fs::read_dir(dir.as_ref().to_path_buf())];
+    let mut paths = HashSet::new();
+
+    while let Some(read_dir) = read_dirs.pop() {
+        let mut read_dir = read_dir.await?;
+        while let Some(entry) = read_dir.next_entry().await? {
+            let path = entry.path();
+            if entry.file_type().await?.is_dir() {
+                read_dirs.push(fs::read_dir(path));
+                continue;
+            }
+
+            let relative_path = path.strip_prefix(dir.as_ref()).unwrap();
+
+            if positive_globset.is_match(relative_path) && !negative_globset.is_match(relative_path)
+            {
+                paths.insert(if relative {
+                    relative_path.to_path_buf()
+                } else {
+                    path.to_path_buf()
+                });
+            }
+        }
+    }
+
+    Ok(paths)
+}
+
 /// Refreshes the sources asynchronously
 pub async fn refresh_sources<I: Iterator<Item = PackageSources>>(
     project: &Project,

@@ -312,12 +336,21 @@ pub mod errors {
         #[error("error interacting with the filesystem")]
         Io(#[from] std::io::Error),

-        /// An invalid glob pattern was found
-        #[error("invalid glob pattern")]
-        Glob(#[from] glob::PatternError),
+        /// An error occurred while globbing
+        #[error("error globbing")]
+        Globbing(#[from] MatchingGlobsError),
+    }
+
+    /// Errors that can occur when finding matching globs
+    #[derive(Debug, Error)]
+    #[non_exhaustive]
+    pub enum MatchingGlobsError {
+        /// An error occurred interacting with the filesystem
+        #[error("error interacting with the filesystem")]
+        Io(#[from] std::io::Error),

         /// An error occurred while globbing
         #[error("error globbing")]
-        Globbing(#[from] glob::GlobError),
+        Globbing(#[from] globset::Error),
     }
 }
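A hedged usage sketch for the new public `matching_globs` helper (it is exported from the crate root; the src/main.rs hunk below imports it as `pesde::matching_globs`). The directory, the patterns, and the `#[tokio::main]` harness are illustrative; the sketch assumes tokio's `macros` feature plus `anyhow` and `globset`, all of which the CLI already depends on:

use pesde::matching_globs;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let globs = vec![
        globset::Glob::new("src/**")?,
        globset::Glob::new("docs/**")?,
        globset::Glob::new("!src/tests/**")?, // negated: subtracted from the matches
    ];

    // `true` asks for paths relative to the directory rather than absolute ones.
    let paths = matching_globs(".", globs, true).await?;
    for path in &paths {
        println!("{}", path.display());
    }
    Ok(())
}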
16 src/main.rs

@@ -6,7 +6,7 @@ use clap::Parser;
 use fs_err::tokio as fs;
 use indicatif::MultiProgress;
 use indicatif_log_bridge::LogWrapper;
-use pesde::{AuthConfig, Project, MANIFEST_FILE_NAME};
+use pesde::{matching_globs, AuthConfig, Project, MANIFEST_FILE_NAME};
 use std::{
     collections::HashSet,
     path::{Path, PathBuf},

@@ -133,17 +133,9 @@ async fn run() -> anyhow::Result<()> {
             return Ok(HashSet::new());
         }

-        manifest
-            .workspace_members
-            .iter()
-            .map(|member| path.join(member))
-            .map(|p| glob::glob(&p.to_string_lossy()))
-            .collect::<Result<Vec<_>, _>>()
-            .context("invalid glob patterns")?
-            .into_iter()
-            .flat_map(|paths| paths.into_iter())
-            .collect::<Result<HashSet<_>, _>>()
-            .context("failed to expand glob patterns")
+        matching_globs(path, manifest.workspace_members, false)
+            .await
+            .context("failed to get workspace members")
     }

     while let Some(path) = current_path {
src/manifest/mod.rs

@@ -1,8 +1,7 @@
-use std::collections::{BTreeMap, BTreeSet};
-
 use relative_path::RelativePathBuf;
 use semver::Version;
 use serde::{Deserialize, Serialize};
+use std::collections::{BTreeMap, HashMap};

 use crate::{
     manifest::{overrides::OverrideKey, target::Target},

@@ -45,7 +44,7 @@ pub struct Manifest {
     /// The indices to use for the package
     #[serde(
         default,
-        serialize_with = "crate::util::serialize_gix_url_map",
+        skip_serializing,
         deserialize_with = "crate::util::deserialize_gix_url_map"
     )]
     pub indices: BTreeMap<String, gix::Url>,

@@ -53,8 +52,7 @@ pub struct Manifest {
     #[cfg(feature = "wally-compat")]
     #[serde(
         default,
-        skip_serializing_if = "BTreeMap::is_empty",
-        serialize_with = "crate::util::serialize_gix_url_map",
+        skip_serializing,
         deserialize_with = "crate::util::deserialize_gix_url_map"
     )]
     pub wally_indices: BTreeMap<String, gix::Url>,

@@ -63,7 +61,7 @@ pub struct Manifest {
     pub overrides: BTreeMap<OverrideKey, DependencySpecifiers>,
     /// The files to include in the package
     #[serde(default)]
-    pub includes: BTreeSet<String>,
+    pub includes: Vec<globset::Glob>,
     /// The patches to apply to packages
     #[cfg(feature = "patches")]
     #[serde(default, skip_serializing)]

@@ -76,7 +74,7 @@ pub struct Manifest {
     pub pesde_version: Option<Version>,
     /// A list of globs pointing to workspace members' directories
     #[serde(default, skip_serializing_if = "Vec::is_empty")]
-    pub workspace_members: Vec<String>,
+    pub workspace_members: Vec<globset::Glob>,
     /// The Roblox place of this project
     #[serde(default, skip_serializing)]
     pub place: BTreeMap<target::RobloxPlaceKind, String>,

@@ -90,6 +88,9 @@ pub struct Manifest {
     /// The dev dependencies of the package
     #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
     pub dev_dependencies: BTreeMap<String, DependencySpecifiers>,
+    /// The user-defined fields of the package
+    #[serde(flatten)]
+    pub user_defined_fields: HashMap<String, toml::Value>,
 }

 /// A dependency type
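A sketch of what the `Vec<globset::Glob>` manifest fields buy: with globset's `serde1` feature (enabled in Cargo.toml above), `includes` and `workspace_members` deserialize straight into validated globs, so a malformed pattern is rejected at manifest parse time instead of at use sites. `ManifestSketch` here is a pared-down stand-in, not pesde's real `Manifest`:

use serde::Deserialize;

#[derive(Deserialize)]
struct ManifestSketch {
    #[serde(default)]
    includes: Vec<globset::Glob>,
    #[serde(default)]
    workspace_members: Vec<globset::Glob>,
}

fn main() -> anyhow::Result<()> {
    let manifest: ManifestSketch = toml::from_str(
        r#"
        includes = ["src/**", "README.md", "!src/tests/**"]
        workspace_members = ["crates/*"]
        "#,
    )?; // a bad pattern would error right here
    assert_eq!(manifest.includes.len(), 3);
    assert_eq!(manifest.includes[0].glob(), "src/**");
    Ok(())
}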
13 src/util.rs

@@ -1,6 +1,6 @@
 use crate::AuthConfig;
 use gix::bstr::BStr;
-use serde::{ser::SerializeMap, Deserialize, Deserializer, Serializer};
+use serde::{Deserialize, Deserializer, Serializer};
 use sha2::{Digest, Sha256};
 use std::collections::BTreeMap;

@@ -37,17 +37,6 @@ pub fn deserialize_gix_url<'de, D: Deserializer<'de>>(
     gix::Url::from_bytes(BStr::new(&s)).map_err(serde::de::Error::custom)
 }

-pub fn serialize_gix_url_map<S: Serializer>(
-    url: &BTreeMap<String, gix::Url>,
-    serializer: S,
-) -> Result<S::Ok, S::Error> {
-    let mut map = serializer.serialize_map(Some(url.len()))?;
-    for (k, v) in url {
-        map.serialize_entry(k, &v.to_bstring().to_string())?;
-    }
-    map.end()
-}
-
 pub fn deserialize_gix_url_map<'de, D: Deserializer<'de>>(
     deserializer: D,
 ) -> Result<BTreeMap<String, gix::Url>, D::Error> {