refactor: use fs-err for fs operations

daimond113 committed 2024-11-01 20:57:32 +01:00
parent c9dc788056
commit 09820e322c
30 changed files with 129 additions and 118 deletions


@ -9,6 +9,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Fixed
- Use a different algorithm for finding a CAS directory to avoid issues with mounted drives by @daimond113
### Changed
- Switched to fs-err for better errors with file system operations by @daimond113
## [0.5.0-rc.7] - 2024-10-30
### Added
- New website by @lukadev-0
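
As background for the changelog entry above: fs-err mirrors the `std::fs` API one-to-one, but its errors name the operation and the path that failed, which is the point of this refactor. A minimal sketch of the difference (`missing.toml` is a hypothetical file used only for illustration):

```rust
// std::fs reports only the raw OS error; fs_err wraps the same call and
// includes the failing path in the message.
fn main() {
    if let Err(e) = std::fs::read_to_string("missing.toml") {
        println!("std::fs: {e}"); // e.g. "No such file or directory (os error 2)"
    }
    if let Err(e) = fs_err::read_to_string("missing.toml") {
        println!("fs_err:  {e}"); // message also identifies `missing.toml`
    }
}
```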

Cargo.lock (generated)

@ -1426,6 +1426,15 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "fs-err"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8bb60e7409f34ef959985bc9d9c5ee8f5db24ee46ed9775850548021710f807f"
dependencies = [
"autocfg",
]
[[package]]
name = "fs4"
version = "0.8.4"
@ -3557,6 +3566,7 @@ dependencies = [
"colored",
"dirs",
"flate2",
"fs-err",
"full_moon",
"git2",
"gix",
@ -3600,6 +3610,7 @@ dependencies = [
"convert_case 0.6.0",
"dotenvy",
"flate2",
"fs-err",
"futures",
"git2",
"gix",


@ -26,7 +26,8 @@ bin = [
"open",
"gix/worktree-mutation",
"serde_json",
"winreg"
"winreg",
"fs-err/expose_original_error"
]
wally-compat = ["zip", "serde_json"]
patches = ["git2"]
@ -60,6 +61,7 @@ chrono = { version = "0.4.38", features = ["serde"] }
sha2 = "0.10.8"
tempfile = "3.13.0"
glob = "0.3.1"
fs-err = "3.0.0"
# TODO: remove this when gitoxide adds support for: committing, pushing, adding
git2 = { version = "0.19.0", optional = true }
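
The `fs-err/expose_original_error` feature enabled for the `bin` build above relates to how fs-err 3.0 surfaces the underlying `std::io::Error`; the name suggests the raw OS-level error stays reachable through the wrapper's `source()` chain rather than being flattened into the message. Treating that as an assumption, a generic sketch of walking such a chain (the file name is hypothetical):

```rust
use std::error::Error as _;

// Print an io::Error followed by whatever source chain it carries.
fn print_chain(err: &std::io::Error) {
    eprintln!("error: {err}");
    let mut source = err.source();
    while let Some(cause) = source {
        eprintln!("  caused by: {cause}");
        source = cause.source();
    }
}

fn main() {
    if let Err(e) = fs_err::read("does-not-exist.bin") {
        print_chain(&e);
    }
}
```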


@ -18,6 +18,7 @@ chrono = { version = "0.4.38", features = ["serde"] }
url = "2.5.2"
futures = "0.3.31"
tempfile = "3.13.0"
fs-err = "3.0.0"
git2 = "0.19.0"
gix = { version = "0.67.0", default-features = false, features = [


@ -8,7 +8,6 @@ use serde::Deserialize;
use sha2::{Digest, Sha256};
use std::{
collections::{BTreeSet, HashMap},
fs::read_dir,
io::{Cursor, Read, Write},
};
use tar::Archive;
@ -102,7 +101,7 @@ pub async fn publish_package(
let mut docs = BTreeSet::new();
let mut docs_pages = HashMap::new();
for entry in read_dir(package_dir.path())? {
for entry in fs_err::read_dir(package_dir.path())? {
let entry = entry?;
let file_name = entry
.file_name()
@ -118,7 +117,7 @@ pub async fn publish_package(
if file_name == "docs" {
let mut stack = vec![(
BTreeSet::new(),
read_dir(entry.path())?,
fs_err::read_dir(entry.path())?,
None::<DocEntryInfo>,
)];
@ -134,7 +133,7 @@ pub async fn publish_package(
if entry.file_type()?.is_dir() {
stack.push((
BTreeSet::new(),
read_dir(entry.path())?,
fs_err::read_dir(entry.path())?,
Some(DocEntryInfo {
label: Some(file_name.to_case(Case::Title)),
..Default::default()
@ -144,7 +143,7 @@ pub async fn publish_package(
}
if file_name == "_category_.json" {
let info = std::fs::read_to_string(entry.path())?;
let info = fs_err::read_to_string(entry.path())?;
let mut info: DocEntryInfo = serde_json::from_str(&info)?;
let old_info = category_info.take();
info.label = info.label.or(old_info.and_then(|i| i.label));
@ -156,7 +155,7 @@ pub async fn publish_package(
continue;
};
let content = std::fs::read_to_string(entry.path())?;
let content = fs_err::read_to_string(entry.path())?;
let content = content.trim();
let hash = format!("{:x}", Sha256::digest(content.as_bytes()));
@ -246,7 +245,7 @@ pub async fn publish_package(
}
if file_name == MANIFEST_FILE_NAME {
let content = std::fs::read_to_string(entry.path())?;
let content = fs_err::read_to_string(entry.path())?;
manifest = Some(toml::de::from_str(&content)?);
} else if file_name
@ -259,7 +258,7 @@ pub async fn publish_package(
return Err(Error::InvalidArchive);
}
let file = std::fs::File::open(entry.path())?;
let file = fs_err::File::open(entry.path())?;
let mut gz = flate2::read::GzEncoder::new(file, flate2::Compression::best());
let mut bytes = vec![];
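
The docs collection above walks the `docs` tree with an explicit stack of `fs_err::read_dir` iterators instead of recursion. A simplified, self-contained sketch of that pattern (the helper name `list_files` is illustrative, not from the codebase):

```rust
use std::path::{Path, PathBuf};

// Stack-based directory walk: push a ReadDir for each subdirectory and pop it
// once exhausted, collecting file paths along the way.
fn list_files(root: &Path) -> std::io::Result<Vec<PathBuf>> {
    let mut files = Vec::new();
    let mut stack = vec![fs_err::read_dir(root)?];
    while let Some(dir) = stack.last_mut() {
        let Some(entry) = dir.next() else {
            stack.pop();
            continue;
        };
        let entry = entry?;
        if entry.file_type()?.is_dir() {
            stack.push(fs_err::read_dir(entry.path())?);
        } else {
            files.push(entry.path());
        }
    }
    Ok(files)
}
```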


@ -6,7 +6,7 @@ use actix_web::{
web, App, HttpServer,
};
use log::info;
use std::{env::current_dir, fs::create_dir_all, path::PathBuf, sync::Mutex};
use std::{env::current_dir, path::PathBuf, sync::Mutex};
use pesde::{
source::{pesde::PesdePackageSource, traits::PackageSource},
@ -86,7 +86,7 @@ async fn run() -> std::io::Result<()> {
let cwd = current_dir().unwrap();
let data_dir = cwd.join("data");
create_dir_all(&data_dir).unwrap();
fs_err::create_dir_all(&data_dir).unwrap();
let project = Project::new(
&cwd,


@ -6,7 +6,6 @@ use actix_web::{
use pesde::{names::PackageName, source::version_id::VersionId};
use std::{
fmt::Display,
fs::create_dir_all,
path::{Path, PathBuf},
};
@ -16,7 +15,7 @@ pub struct FSStorage {
}
fn read_file_to_response(path: &Path, content_type: &str) -> Result<HttpResponse, Error> {
Ok(match std::fs::read(path) {
Ok(match fs_err::read(path) {
Ok(contents) => HttpResponse::Ok()
.append_header((CONTENT_TYPE, content_type))
.append_header((CONTENT_ENCODING, "gzip"))
@ -41,9 +40,9 @@ impl StorageImpl for FSStorage {
.join(name)
.join(version.version().to_string())
.join(version.target().to_string());
create_dir_all(&path)?;
fs_err::create_dir_all(&path)?;
std::fs::write(path.join("pkg.tar.gz"), &contents)?;
fs_err::write(path.join("pkg.tar.gz"), &contents)?;
Ok(())
}
@ -79,9 +78,9 @@ impl StorageImpl for FSStorage {
.join(name)
.join(version.version().to_string())
.join(version.target().to_string());
create_dir_all(&path)?;
fs_err::create_dir_all(&path)?;
std::fs::write(path.join("readme.gz"), &contents)?;
fs_err::write(path.join("readme.gz"), &contents)?;
Ok(())
}
@ -105,9 +104,9 @@ impl StorageImpl for FSStorage {
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
let path = self.root.join("Doc");
create_dir_all(&path)?;
fs_err::create_dir_all(&path)?;
std::fs::write(path.join(format!("{doc_hash}.gz")), &contents)?;
fs_err::write(path.join(format!("{doc_hash}.gz")), &contents)?;
Ok(())
}


@ -31,7 +31,7 @@ impl ScriptsRepoCommand {
config.scripts_repo = repo.clone();
write_config(&config)?;
std::fs::remove_dir_all(home_dir()?.join("scripts"))
fs_err::remove_dir_all(home_dir()?.join("scripts"))
.context("failed to remove scripts directory")?;
println!("scripts repo set to: {repo}");


@ -78,7 +78,7 @@ impl ExecuteCommand {
let bin_path = target.bin_path().context("package has no binary export")?;
let tmp_dir = project.cas_dir().join(".tmp");
std::fs::create_dir_all(&tmp_dir).context("failed to create temporary directory")?;
fs_err::create_dir_all(&tmp_dir).context("failed to create temporary directory")?;
let tempdir =
tempfile::tempdir_in(tmp_dir).context("failed to create temporary directory")?;


@ -125,9 +125,9 @@ impl InitCommand {
let folder = project
.package_dir()
.join(concat!(".", env!("CARGO_PKG_NAME")));
std::fs::create_dir_all(&folder).context("failed to create scripts folder")?;
fs_err::create_dir_all(&folder).context("failed to create scripts folder")?;
std::fs::write(
fs_err::write(
folder.join(format!("{}.luau", ScriptName::RobloxSyncConfigGenerator)),
script_contents(Path::new(&format!(
"lune/rojo/{}.luau",
@ -137,7 +137,7 @@ impl InitCommand {
.context("failed to write sync config generator script file")?;
#[cfg(feature = "wally-compat")]
std::fs::write(
fs_err::write(
folder.join(format!("{}.luau", ScriptName::SourcemapGenerator)),
script_contents(Path::new(&format!(
"lune/rojo/{}.luau",


@ -154,7 +154,7 @@ impl InstallCommand {
if deleted_folders.insert(folder.to_string()) {
log::debug!("deleting the {folder} folder");
if let Some(e) = std::fs::remove_dir_all(project.package_dir().join(&folder))
if let Some(e) = fs_err::remove_dir_all(project.package_dir().join(&folder))
.err()
.filter(|e| e.kind() != std::io::ErrorKind::NotFound)
{
@ -245,7 +245,7 @@ impl InstallCommand {
}
let bin_file = bin_folder.join(alias);
std::fs::write(&bin_file, bin_link_file(alias))
fs_err::write(&bin_file, bin_link_file(alias))
.context("failed to write bin link file")?;
make_executable(&bin_file).context("failed to make bin link executable")?;
@ -253,7 +253,7 @@ impl InstallCommand {
#[cfg(windows)]
{
let bin_file = bin_file.with_extension(std::env::consts::EXE_EXTENSION);
std::fs::copy(
fs_err::copy(
std::env::current_exe().context("failed to get current executable path")?,
&bin_file,
)


@ -45,7 +45,7 @@ impl PatchCommand {
.join(name.escaped())
.join(version_id.escaped())
.join(chrono::Utc::now().timestamp().to_string());
std::fs::create_dir_all(&directory)?;
fs_err::create_dir_all(&directory)?;
source
.download(&node.node.pkg_ref, &project, &reqwest)?


@ -53,10 +53,10 @@ impl PatchCommitCommand {
.context("failed to parse manifest")?;
let patch = create_patch(&self.directory).context("failed to create patch")?;
std::fs::remove_dir_all(self.directory).context("failed to remove patch directory")?;
fs_err::remove_dir_all(self.directory).context("failed to remove patch directory")?;
let patches_dir = project.package_dir().join("patches");
std::fs::create_dir_all(&patches_dir).context("failed to create patches directory")?;
fs_err::create_dir_all(&patches_dir).context("failed to create patches directory")?;
let patch_file_name = format!("{}-{}.patch", name.escaped(), version_id.escaped());
@ -65,7 +65,7 @@ impl PatchCommitCommand {
anyhow::bail!("patch file already exists: {}", patch_file.display());
}
std::fs::write(&patch_file, patch).context("failed to write patch file")?;
fs_err::write(&patch_file, patch).context("failed to write patch file")?;
manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
[&name.to_string()][&version_id.to_string()] =


@ -176,7 +176,7 @@ impl PublishCommand {
}
let contents =
std::fs::read_to_string(&export_path).context(format!("failed to read {name}"))?;
fs_err::read_to_string(&export_path).context(format!("failed to read {name}"))?;
if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
errs.into_iter()
@ -235,8 +235,9 @@ impl PublishCommand {
archive.append_file(
included_name,
&mut std::fs::File::open(&included_path)
.context(format!("failed to read {included_name}"))?,
fs_err::File::open(&included_path)
.context(format!("failed to read {included_name}"))?
.file_mut(),
)?;
} else {
display_includes.push(format!("{included_name}/*"));
@ -340,7 +341,7 @@ impl PublishCommand {
.context("failed to get workspace directory")?,
)
.join(MANIFEST_FILE_NAME);
let manifest = std::fs::read_to_string(&manifest)
let manifest = fs_err::read_to_string(&manifest)
.context("failed to read workspace package manifest")?;
let manifest = toml::from_str::<pesde::manifest::Manifest>(&manifest)
.context("failed to parse workspace package manifest")?;
@ -489,7 +490,7 @@ impl PublishCommand {
}
if self.dry_run {
std::fs::write("package.tar.gz", archive)?;
fs_err::write("package.tar.gz", archive)?;
println!(
"{}",


@ -39,7 +39,7 @@ impl Default for CliConfig {
}
pub fn read_config() -> anyhow::Result<CliConfig> {
let config_string = match std::fs::read_to_string(home_dir()?.join("config.toml")) {
let config_string = match fs_err::read_to_string(home_dir()?.join("config.toml")) {
Ok(config_string) => config_string,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
return Ok(CliConfig::default());
@ -54,7 +54,7 @@ pub fn read_config() -> anyhow::Result<CliConfig> {
pub fn write_config(config: &CliConfig) -> anyhow::Result<()> {
let config_string = toml::to_string(config).context("failed to serialize config")?;
std::fs::write(home_dir()?.join("config.toml"), config_string)
fs_err::write(home_dir()?.join("config.toml"), config_string)
.context("failed to write config file")?;
Ok(())
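
`read_config` above keeps its `ErrorKind::NotFound` check untouched because fs-err functions still return `std::io::Result` and the wrapped error preserves the original `ErrorKind`. The same idiom condensed (the path is hypothetical):

```rust
use std::io::ErrorKind;

// A missing file is not an error here; anything else propagates unchanged.
fn read_optional(path: &str) -> std::io::Result<Option<String>> {
    match fs_err::read_to_string(path) {
        Ok(contents) => Ok(Some(contents)),
        Err(e) if e.kind() == ErrorKind::NotFound => Ok(None),
        Err(e) => Err(e),
    }
}
```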


@ -6,11 +6,11 @@ pub fn make_executable<P: AsRef<Path>>(_path: P) -> anyhow::Result<()> {
use anyhow::Context;
use std::os::unix::fs::PermissionsExt;
let mut perms = std::fs::metadata(&_path)
let mut perms = fs_err::metadata(&_path)
.context("failed to get bin link file metadata")?
.permissions();
perms.set_mode(perms.mode() | 0o111);
std::fs::set_permissions(&_path, perms)
fs_err::set_permissions(&_path, perms)
.context("failed to set bin link file permissions")?;
}


@ -10,7 +10,6 @@ use pesde::{
use relative_path::RelativePathBuf;
use std::{
collections::{BTreeMap, HashSet},
fs::create_dir_all,
path::PathBuf,
str::FromStr,
sync::Arc,
@ -35,7 +34,7 @@ pub fn home_dir() -> anyhow::Result<PathBuf> {
pub fn bin_dir() -> anyhow::Result<PathBuf> {
let bin_dir = home_dir()?.join("bin");
create_dir_all(&bin_dir).context("failed to create bin folder")?;
fs_err::create_dir_all(&bin_dir).context("failed to create bin folder")?;
Ok(bin_dir)
}


@ -86,7 +86,7 @@ fn update_repo<P: AsRef<Path>>(
.write(gix::index::write::Options::default())
.context("failed to write index")?;
} else {
std::fs::create_dir_all(path).context(format!("failed to create {name} directory"))?;
fs_err::create_dir_all(path).context(format!("failed to create {name} directory"))?;
gix::prepare_clone(url, path)
.context(format!("failed to prepare {name} repository clone"))?


@ -3,7 +3,7 @@ use colored::Colorize;
use reqwest::header::ACCEPT;
use semver::Version;
use serde::Deserialize;
use std::{fs::create_dir_all, io::Read, path::PathBuf};
use std::{io::Read, path::PathBuf};
use crate::cli::{
bin_dir,
@ -157,7 +157,7 @@ pub fn get_or_download_version(
version: &Version,
) -> anyhow::Result<Option<PathBuf>> {
let path = home_dir()?.join("versions");
create_dir_all(&path).context("failed to create versions directory")?;
fs_err::create_dir_all(&path).context("failed to create versions directory")?;
let path = path.join(format!("{version}{}", std::env::consts::EXE_SUFFIX));
@ -172,11 +172,11 @@ pub fn get_or_download_version(
}
if is_requested_version {
std::fs::copy(std::env::current_exe()?, &path)
fs_err::copy(std::env::current_exe()?, &path)
.context("failed to copy current executable to version directory")?;
} else {
let bytes = download_github_release(reqwest, version)?;
std::fs::write(&path, bytes).context("failed to write downloaded version file")?;
fs_err::write(&path, bytes).context("failed to write downloaded version file")?;
}
make_executable(&path).context("failed to make downloaded version executable")?;
@ -190,9 +190,9 @@ pub fn get_or_download_version(
pub fn max_installed_version() -> anyhow::Result<Version> {
let versions_dir = home_dir()?.join("versions");
create_dir_all(&versions_dir).context("failed to create versions directory")?;
fs_err::create_dir_all(&versions_dir).context("failed to create versions directory")?;
let max_version = std::fs::read_dir(versions_dir)
let max_version = fs_err::read_dir(versions_dir)
.context("failed to read versions directory")?
.collect::<Result<Vec<_>, _>>()?
.into_iter()
@ -228,7 +228,7 @@ pub fn update_bin_exe() -> anyhow::Result<()> {
std::env::consts::EXE_SUFFIX
));
std::fs::copy(std::env::current_exe()?, &copy_to)
fs_err::copy(std::env::current_exe()?, &copy_to)
.context("failed to copy executable to bin folder")?;
make_executable(&copy_to)


@ -7,9 +7,9 @@ use crate::{
},
Project, PACKAGES_CONTAINER_NAME,
};
use fs_err::create_dir_all;
use std::{
collections::HashSet,
fs::create_dir_all,
sync::{mpsc::Receiver, Arc, Mutex},
};
@ -133,7 +133,7 @@ pub mod errors {
RefreshFailed(#[from] Box<crate::source::errors::RefreshError>),
/// Error interacting with the filesystem
#[error("error interacting with filesystem")]
#[error("error interacting with the filesystem")]
Io(#[from] std::io::Error),
/// Error downloading a package


@ -138,31 +138,31 @@ impl Project {
/// Read the manifest file
pub fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
let string = std::fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
let string = fs_err::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
Ok(string)
}
/// Deserialize the manifest file
pub fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
let string = std::fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
let string = fs_err::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
Ok(toml::from_str(&string)?)
}
/// Write the manifest file
pub fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
std::fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref())
fs_err::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref())
}
/// Deserialize the lockfile
pub fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
let string = std::fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME))?;
let string = fs_err::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME))?;
Ok(toml::from_str(&string)?)
}
/// Write the lockfile
pub fn write_lockfile(&self, lockfile: Lockfile) -> Result<(), errors::LockfileWriteError> {
let string = toml::to_string(&lockfile)?;
std::fs::write(self.package_dir.join(LOCKFILE_FILE_NAME), string)?;
fs_err::write(self.package_dir.join(LOCKFILE_FILE_NAME), string)?;
Ok(())
}
@ -172,8 +172,8 @@ impl Project {
dir: P,
) -> Result<HashMap<PathBuf, Manifest>, errors::WorkspaceMembersError> {
let dir = dir.as_ref().to_path_buf();
let manifest = std::fs::read_to_string(dir.join(MANIFEST_FILE_NAME))
.map_err(|e| errors::WorkspaceMembersError::ManifestMissing(dir.to_path_buf(), e))?;
let manifest = fs_err::read_to_string(dir.join(MANIFEST_FILE_NAME))
.map_err(errors::WorkspaceMembersError::ManifestMissing)?;
let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
errors::WorkspaceMembersError::ManifestDeser(dir.to_path_buf(), Box::new(e))
})?;
@ -191,8 +191,8 @@ impl Project {
members
.into_iter()
.map(|path| {
let manifest = std::fs::read_to_string(path.join(MANIFEST_FILE_NAME))
.map_err(|e| errors::WorkspaceMembersError::ManifestMissing(path.clone(), e))?;
let manifest = fs_err::read_to_string(path.join(MANIFEST_FILE_NAME))
.map_err(errors::WorkspaceMembersError::ManifestMissing)?;
let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
errors::WorkspaceMembersError::ManifestDeser(path.clone(), Box::new(e))
})?;
@ -251,8 +251,8 @@ pub mod errors {
#[non_exhaustive]
pub enum WorkspaceMembersError {
/// The manifest file could not be found
#[error("missing manifest file at {0}")]
ManifestMissing(PathBuf, #[source] std::io::Error),
#[error("missing manifest file")]
ManifestMissing(#[source] std::io::Error),
/// An error occurred deserializing the manifest file
#[error("error deserializing manifest file at {0}")]


@ -9,7 +9,6 @@ use crate::{
use std::{
collections::BTreeMap,
ffi::OsStr,
fs::create_dir_all,
path::{Path, PathBuf},
};
@ -18,14 +17,14 @@ pub mod generator;
fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
let p = path.as_ref();
create_dir_all(p)?;
fs_err::create_dir_all(p)?;
p.canonicalize()
}
fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
let cas_path = store_in_cas(cas_dir, contents.as_bytes())?.1;
std::fs::hard_link(cas_path, destination)
fs_err::hard_link(cas_path, destination)
}
impl Project {
@ -58,7 +57,7 @@ impl Project {
let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
let lib_file = lib_file.to_path(&container_folder);
let contents = match std::fs::read_to_string(&lib_file) {
let contents = match fs_err::read_to_string(&lib_file) {
Ok(contents) => contents,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
return Err(errors::LinkingError::LibFileNotFound(


@ -11,7 +11,6 @@ use indicatif_log_bridge::LogWrapper;
use pesde::{AuthConfig, Project, MANIFEST_FILE_NAME};
use std::{
collections::HashSet,
fs::{create_dir_all, hard_link, remove_file},
path::{Path, PathBuf},
thread::spawn,
};
@ -45,8 +44,8 @@ fn get_linkable_dir(path: &Path) -> PathBuf {
let try_path = curr_path.join(temp_file_name);
if hard_link(file_to_try.path(), &try_path).is_ok() {
if let Err(err) = remove_file(&try_path) {
if fs_err::hard_link(file_to_try.path(), &try_path).is_ok() {
if let Err(err) = fs_err::remove_file(&try_path) {
log::warn!(
"failed to remove temporary file at {}: {err}",
try_path.display()
@ -109,7 +108,7 @@ fn run() -> anyhow::Result<()> {
let mut workspace_dir = None::<PathBuf>;
fn get_workspace_members(path: &Path) -> anyhow::Result<HashSet<PathBuf>> {
let manifest = std::fs::read_to_string(path.join(MANIFEST_FILE_NAME))
let manifest = fs_err::read_to_string(path.join(MANIFEST_FILE_NAME))
.context("failed to read manifest")?;
let manifest: pesde::manifest::Manifest =
toml::from_str(&manifest).context("failed to parse manifest")?;
@ -180,7 +179,7 @@ fn run() -> anyhow::Result<()> {
let home_dir = home_dir()?;
let data_dir = home_dir.join("data");
create_dir_all(&data_dir).expect("failed to create data directory");
fs_err::create_dir_all(&data_dir).expect("failed to create data directory");
let cas_dir = get_linkable_dir(&project_root_dir).join(HOME_DIR);
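
For the `get_linkable_dir` hunk above (which backs the changelog's CAS-directory fix): hard links only work within a single filesystem, so candidate directories are probed by hard-linking a temporary file and removing it again. A standalone sketch of that probe (the probe file name is made up for illustration):

```rust
use std::path::Path;

// Returns true if a freshly created temporary file can be hard-linked into
// `dir`, i.e. `dir` lives on the same filesystem as the temp location.
fn can_hard_link_into(dir: &Path) -> std::io::Result<bool> {
    let probe = tempfile::NamedTempFile::new()?;
    let target = dir.join(".hardlink-probe");
    Ok(match fs_err::hard_link(probe.path(), &target) {
        Ok(()) => {
            let _ = fs_err::remove_file(&target);
            true
        }
        Err(_) => false,
    })
}
```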


@ -2,9 +2,10 @@ use crate::{
lockfile::DownloadedGraph, source::traits::PackageRef, Project, MANIFEST_FILE_NAME,
PACKAGES_CONTAINER_NAME,
};
use fs_err::read;
use git2::{ApplyLocation, ApplyOptions, Diff, DiffFormat, DiffLineType, Repository, Signature};
use relative_path::RelativePathBuf;
use std::{fs::read, path::Path};
use std::path::Path;
/// Set up a git repository for patches
pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> {
@ -77,9 +78,9 @@ impl Project {
for (name, versions) in manifest.patches {
for (version_id, patch_path) in versions {
let patch_path = patch_path.to_path(self.package_dir());
let patch = Diff::from_buffer(&read(&patch_path).map_err(|e| {
errors::ApplyPatchesError::PatchReadError(patch_path.clone(), e)
})?)?;
let patch = Diff::from_buffer(
&read(&patch_path).map_err(errors::ApplyPatchesError::PatchRead)?,
)?;
let Some(node) = graph
.get(&name)
@ -134,8 +135,8 @@ impl Project {
// there is no way (as far as I know) to check if it's hardlinked
// so, we always unlink it
let content = read(&path).unwrap();
std::fs::remove_file(&path).unwrap();
std::fs::write(path, content).unwrap();
fs_err::remove_file(&path).unwrap();
fs_err::write(path, content).unwrap();
true
});
@ -144,9 +145,8 @@ impl Project {
log::debug!("patch applied to {name}@{version_id}, removing .git directory");
std::fs::remove_dir_all(container_folder.join(".git")).map_err(|e| {
errors::ApplyPatchesError::GitDirectoryRemovalError(container_folder, e)
})?;
fs_err::remove_dir_all(container_folder.join(".git"))
.map_err(errors::ApplyPatchesError::DotGitRemove)?;
}
}
@ -156,8 +156,6 @@ impl Project {
/// Errors that can occur when using patches
pub mod errors {
use std::path::PathBuf;
use thiserror::Error;
/// Errors that can occur when applying patches
@ -170,14 +168,14 @@ pub mod errors {
/// Error interacting with git
#[error("error interacting with git")]
GitError(#[from] git2::Error),
Git(#[from] git2::Error),
/// Error reading the patch file
#[error("error reading patch file at {0}")]
PatchReadError(PathBuf, #[source] std::io::Error),
#[error("error reading patch file")]
PatchRead(#[source] std::io::Error),
/// Error removing the .git directory
#[error("error removing .git directory")]
GitDirectoryRemovalError(PathBuf, #[source] std::io::Error),
DotGitRemove(#[source] std::io::Error),
}
}
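
The read/remove/rewrite dance above exists because packages are hard-linked out of the CAS and there is no portable way to ask whether a given path is a hard link; rewriting the file leaves an independent copy, so the patch never touches the CAS entry. The trick in isolation:

```rust
use std::path::Path;

// Break a potential hard link by recreating the file with the same contents.
fn break_hard_link(path: &Path) -> std::io::Result<()> {
    let contents = fs_err::read(path)?;
    fs_err::remove_file(path)?;
    fs_err::write(path, contents)
}
```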


@ -35,7 +35,7 @@ pub enum PackageFS {
Copy(PathBuf, TargetKind),
}
fn make_readonly(_file: &std::fs::File) -> std::io::Result<()> {
fn make_readonly(_file: &fs_err::File) -> std::io::Result<()> {
// on Windows, file deletion is disallowed if the file is read-only which breaks patching
#[cfg(not(windows))]
{
@ -56,11 +56,11 @@ pub(crate) fn store_in_cas<P: AsRef<Path>>(
let (prefix, rest) = hash.split_at(2);
let folder = cas_dir.as_ref().join(prefix);
std::fs::create_dir_all(&folder)?;
fs_err::create_dir_all(&folder)?;
let cas_path = folder.join(rest);
if !cas_path.exists() {
let mut file = std::fs::File::create(&cas_path)?;
let mut file = fs_err::File::create(&cas_path)?;
file.write_all(contents)?;
make_readonly(&file)?;
@ -74,7 +74,7 @@ pub(crate) fn store_reader_in_cas<P: AsRef<Path>>(
contents: &mut dyn Read,
) -> std::io::Result<String> {
let tmp_dir = cas_dir.as_ref().join(".tmp");
std::fs::create_dir_all(&tmp_dir)?;
fs_err::create_dir_all(&tmp_dir)?;
let mut hasher = Sha256::new();
let mut buf = [0; 8 * 1024];
let mut file_writer = BufWriter::new(tempfile::NamedTempFile::new_in(&tmp_dir)?);
@ -94,12 +94,12 @@ pub(crate) fn store_reader_in_cas<P: AsRef<Path>>(
let (prefix, rest) = hash.split_at(2);
let folder = cas_dir.as_ref().join(prefix);
std::fs::create_dir_all(&folder)?;
fs_err::create_dir_all(&folder)?;
let cas_path = folder.join(rest);
match file_writer.into_inner()?.persist_noclobber(cas_path) {
Ok(f) => {
make_readonly(&f)?;
match file_writer.into_inner()?.persist_noclobber(&cas_path) {
Ok(_) => {
make_readonly(&fs_err::File::open(cas_path)?)?;
}
Err(e) if e.error.kind() == std::io::ErrorKind::AlreadyExists => {}
Err(e) => return Err(e.error),
@ -113,8 +113,8 @@ fn copy_dir_all(
dst: impl AsRef<Path>,
target: TargetKind,
) -> std::io::Result<()> {
std::fs::create_dir_all(&dst)?;
'outer: for entry in std::fs::read_dir(src)? {
fs_err::create_dir_all(&dst)?;
'outer: for entry in fs_err::read_dir(src.as_ref().to_path_buf())? {
let entry = entry?;
let ty = entry.file_type()?;
let file_name = entry.file_name().to_string_lossy().to_string();
@ -136,7 +136,7 @@ fn copy_dir_all(
continue;
}
std::fs::copy(entry.path(), dst.as_ref().join(file_name))?;
fs_err::copy(entry.path(), dst.as_ref().join(file_name))?;
}
}
Ok(())
@ -158,17 +158,17 @@ impl PackageFS {
match entry {
FSEntry::File(hash) => {
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)?;
fs_err::create_dir_all(parent)?;
}
let (prefix, rest) = hash.split_at(2);
let cas_file_path = cas_path.as_ref().join(prefix).join(rest);
if link {
std::fs::hard_link(cas_file_path, path)?;
fs_err::hard_link(cas_file_path, path)?;
} else {
let mut f = std::fs::File::create(&path)?;
f.write_all(&std::fs::read(cas_file_path)?)?;
let mut f = fs_err::File::create(&path)?;
f.write_all(&fs_err::read(cas_file_path)?)?;
#[cfg(unix)]
{
@ -180,7 +180,7 @@ impl PackageFS {
}
}
FSEntry::Directory => {
std::fs::create_dir_all(path)?;
fs_err::create_dir_all(path)?;
}
}
}
@ -205,6 +205,6 @@ impl PackageFS {
let (prefix, rest) = file_hash.as_ref().split_at(2);
let cas_file_path = cas_path.as_ref().join(prefix).join(rest);
std::fs::read_to_string(cas_file_path).ok()
fs_err::read_to_string(cas_file_path).ok()
}
}
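
`make_readonly` now takes `&fs_err::File`, and the freshly persisted CAS file is reopened through fs-err before the call. Its body is not part of this diff; a plausible implementation consistent with the signature and the Windows caveat in the surrounding comment might look like this (a guess, not the actual code):

```rust
// Hypothetical body: mark the CAS entry read-only except on Windows, where a
// read-only file cannot be deleted and would break patching.
fn make_readonly(_file: &fs_err::File) -> std::io::Result<()> {
    #[cfg(not(windows))]
    {
        let mut perms = _file.metadata()?.permissions();
        perms.set_readonly(true);
        _file.set_permissions(perms)?;
    }
    Ok(())
}
```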


@ -335,7 +335,7 @@ impl PackageSource for GitPackageSource {
.join(hash(self.as_bytes()))
.join(&pkg_ref.tree_id);
match std::fs::read_to_string(&index_file) {
match fs_err::read_to_string(&index_file) {
Ok(s) => {
log::debug!(
"using cached index file for package {}#{} {}",
@ -490,10 +490,10 @@ impl PackageSource for GitPackageSource {
};
if let Some(parent) = index_file.parent() {
std::fs::create_dir_all(parent)?;
fs_err::create_dir_all(parent)?;
}
std::fs::write(
fs_err::write(
&index_file,
toml::to_string(&fs).map_err(|e| {
errors::DownloadError::SerializeIndex(Box::new(self.repo_url.clone()), e)


@ -140,7 +140,7 @@ pub trait GitBasedSource {
return Ok(());
}
std::fs::create_dir_all(&path)?;
fs_err::create_dir_all(&path)?;
let auth_config = project.auth_config.clone();


@ -253,7 +253,7 @@ impl PackageSource for PesdePackageSource {
.join(pkg_ref.version.to_string())
.join(pkg_ref.target.to_string());
match std::fs::read_to_string(&index_file) {
match fs_err::read_to_string(&index_file) {
Ok(s) => {
log::debug!(
"using cached index file for package {}@{} {}",
@ -320,10 +320,10 @@ impl PackageSource for PesdePackageSource {
let fs = PackageFS::CAS(entries);
if let Some(parent) = index_file.parent() {
std::fs::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
fs_err::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
}
std::fs::write(&index_file, toml::to_string(&fs)?)
fs_err::write(&index_file, toml::to_string(&fs)?)
.map_err(errors::DownloadError::WriteIndex)?;
Ok((fs, pkg_ref.target.clone()))


@ -62,7 +62,7 @@ pub(crate) fn get_target(
let build_files = Default::default();
let manifest = tempdir.path().join(WALLY_MANIFEST_FILE_NAME);
let manifest = std::fs::read_to_string(&manifest)?;
let manifest = fs_err::read_to_string(&manifest)?;
let manifest: WallyManifest = toml::from_str(&manifest)?;
Ok(if matches!(manifest.package.realm, Realm::Shared) {


@ -151,7 +151,7 @@ impl PackageSource for WallyPackageSource {
.join(pkg_ref.name.escaped())
.join(pkg_ref.version.to_string());
let tempdir = match std::fs::read_to_string(&index_file) {
let tempdir = match fs_err::read_to_string(&index_file) {
Ok(s) => {
log::debug!(
"using cached index file for package {}@{}",
@ -198,7 +198,7 @@ impl PackageSource for WallyPackageSource {
let mut entries = BTreeMap::new();
let mut dir_entries = std::fs::read_dir(tempdir.path())?.collect::<VecDeque<_>>();
let mut dir_entries = fs_err::read_dir(tempdir.path())?.collect::<VecDeque<_>>();
while let Some(entry) = dir_entries.pop_front() {
let entry = entry?;
let path =
@ -210,7 +210,7 @@ impl PackageSource for WallyPackageSource {
}
entries.insert(path, FSEntry::Directory);
dir_entries.extend(std::fs::read_dir(entry.path())?);
dir_entries.extend(fs_err::read_dir(entry.path())?);
continue;
}
@ -219,7 +219,7 @@ impl PackageSource for WallyPackageSource {
continue;
}
let mut file = std::fs::File::open(entry.path())?;
let mut file = fs_err::File::open(entry.path())?;
let hash = store_reader_in_cas(project.cas_dir(), &mut file)?;
entries.insert(path, FSEntry::File(hash));
}
@ -227,10 +227,10 @@ impl PackageSource for WallyPackageSource {
let fs = PackageFS::CAS(entries);
if let Some(parent) = index_file.parent() {
std::fs::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
fs_err::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
}
std::fs::write(&index_file, toml::to_string(&fs)?)
fs_err::write(&index_file, toml::to_string(&fs)?)
.map_err(errors::DownloadError::WriteIndex)?;
Ok((fs, get_target(project, &tempdir)?))
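
One detail worth calling out in the Wally download path above: `fs_err::File` implements `std::io::Read` (as well as `Write` and `Seek`), which is why the handle can be passed straight to `store_reader_in_cas` as a `&mut dyn Read`. A tiny sketch of that property with an illustrative consumer:

```rust
use std::io::Read;

// Any reader works here, including fs_err::File.
fn count_bytes(reader: &mut dyn Read) -> std::io::Result<u64> {
    let mut total = 0u64;
    let mut buf = [0u8; 8 * 1024];
    loop {
        let read = reader.read(&mut buf)?;
        if read == 0 {
            return Ok(total);
        }
        total += read as u64;
    }
}

fn main() -> std::io::Result<()> {
    // Cargo.toml is just a convenient file that exists in most Rust projects.
    let mut file = fs_err::File::open("Cargo.toml")?;
    println!("{} bytes", count_bytes(&mut file)?);
    Ok(())
}
```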