Mirror of https://github.com/pesde-pkg/pesde.git (synced 2024-12-12 11:00:36 +00:00)

Commit 09820e322c (parent c9dc788056)
refactor: use fs-err for fs operations

30 changed files with 129 additions and 118 deletions
@@ -9,6 +9,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Fixed
 - Use a different algorithm for finding a CAS directory to avoid issues with mounted drives by @daimond113
 
+### Changed
+- Switched to fs-err for better errors with file system operations by @daimond113
+
 ## [0.5.0-rc.7] - 2024-10-30
 ### Added
 - New website by @lukadev-0
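The changelog entry above is the whole point of the refactor: std::fs errors carry no file path, while fs-err wraps the same operations and names the file that failed. A minimal sketch of the contrast, assuming a missing manifest file (the path and the exact wording of the fs-err message are illustrative, not taken from this commit):

    fn main() {
        // std::fs: typically just "No such file or directory (os error 2)",
        // with no hint about which file was being read.
        if let Err(e) = std::fs::read_to_string("pesde.toml") {
            eprintln!("std::fs: {e}");
        }

        // fs-err mirrors the std::fs API one-to-one, but its error message
        // includes the path, e.g. "failed to read file `pesde.toml`: ...".
        if let Err(e) = fs_err::read_to_string("pesde.toml") {
            eprintln!("fs_err:  {e}");
        }
    }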
Cargo.lock (generated, 11 lines changed)
@@ -1426,6 +1426,15 @@ dependencies = [
 "percent-encoding",
 ]
 
+[[package]]
+name = "fs-err"
+version = "3.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8bb60e7409f34ef959985bc9d9c5ee8f5db24ee46ed9775850548021710f807f"
+dependencies = [
+ "autocfg",
+]
+
 [[package]]
 name = "fs4"
 version = "0.8.4"

@@ -3557,6 +3566,7 @@ dependencies = [
 "colored",
 "dirs",
 "flate2",
+"fs-err",
 "full_moon",
 "git2",
 "gix",

@@ -3600,6 +3610,7 @@ dependencies = [
 "convert_case 0.6.0",
 "dotenvy",
 "flate2",
+"fs-err",
 "futures",
 "git2",
 "gix",
@@ -26,7 +26,8 @@ bin = [
 "open",
 "gix/worktree-mutation",
 "serde_json",
-"winreg"
+"winreg",
+"fs-err/expose_original_error"
 ]
 wally-compat = ["zip", "serde_json"]
 patches = ["git2"]

@@ -60,6 +61,7 @@ chrono = { version = "0.4.38", features = ["serde"] }
 sha2 = "0.10.8"
 tempfile = "3.13.0"
 glob = "0.3.1"
+fs-err = "3.0.0"
 
 # TODO: remove this when gitoxide adds support for: committing, pushing, adding
 git2 = { version = "0.19.0", optional = true }

@@ -18,6 +18,7 @@ chrono = { version = "0.4.38", features = ["serde"] }
 url = "2.5.2"
 futures = "0.3.31"
 tempfile = "3.13.0"
+fs-err = "3.0.0"
 
 git2 = "0.19.0"
 gix = { version = "0.67.0", default-features = false, features = [
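The bin feature list above also enables fs-err's expose_original_error feature. My understanding (an assumption about the crate, not something stated in this diff) is that it keeps the underlying std::io::Error reachable through the error's source() chain instead of folding its text into the display message, which matters once these errors travel through anyhow contexts the way the CLI code further down does. A hedged sketch of printing such a chain (path and messages are illustrative):

    use anyhow::Context;

    fn read_manifest() -> anyhow::Result<String> {
        fs_err::read_to_string("pesde.toml").context("failed to read manifest")
    }

    fn main() {
        if let Err(e) = read_manifest() {
            // anyhow's {:?} prints the whole cause chain, roughly:
            //   failed to read manifest
            //   caused by: failed to read file `pesde.toml`: ...
            eprintln!("{e:?}");

            // Or walk it by hand; with expose_original_error the raw OS error
            // should show up as a separate link rather than as inline text.
            for cause in e.chain().skip(1) {
                eprintln!("caused by: {cause}");
            }
        }
    }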
@@ -8,7 +8,6 @@ use serde::Deserialize;
 use sha2::{Digest, Sha256};
 use std::{
 collections::{BTreeSet, HashMap},
-fs::read_dir,
 io::{Cursor, Read, Write},
 };
 use tar::Archive;

@@ -102,7 +101,7 @@ pub async fn publish_package(
 let mut docs = BTreeSet::new();
 let mut docs_pages = HashMap::new();
 
-for entry in read_dir(package_dir.path())? {
+for entry in fs_err::read_dir(package_dir.path())? {
 let entry = entry?;
 let file_name = entry
 .file_name()

@@ -118,7 +117,7 @@ pub async fn publish_package(
 if file_name == "docs" {
 let mut stack = vec![(
 BTreeSet::new(),
-read_dir(entry.path())?,
+fs_err::read_dir(entry.path())?,
 None::<DocEntryInfo>,
 )];
 

@@ -134,7 +133,7 @@ pub async fn publish_package(
 if entry.file_type()?.is_dir() {
 stack.push((
 BTreeSet::new(),
-read_dir(entry.path())?,
+fs_err::read_dir(entry.path())?,
 Some(DocEntryInfo {
 label: Some(file_name.to_case(Case::Title)),
 ..Default::default()

@@ -144,7 +143,7 @@ pub async fn publish_package(
 }
 
 if file_name == "_category_.json" {
-let info = std::fs::read_to_string(entry.path())?;
+let info = fs_err::read_to_string(entry.path())?;
 let mut info: DocEntryInfo = serde_json::from_str(&info)?;
 let old_info = category_info.take();
 info.label = info.label.or(old_info.and_then(|i| i.label));

@@ -156,7 +155,7 @@ pub async fn publish_package(
 continue;
 };
 
-let content = std::fs::read_to_string(entry.path())?;
+let content = fs_err::read_to_string(entry.path())?;
 let content = content.trim();
 let hash = format!("{:x}", Sha256::digest(content.as_bytes()));
 

@@ -246,7 +245,7 @@ pub async fn publish_package(
 }
 
 if file_name == MANIFEST_FILE_NAME {
-let content = std::fs::read_to_string(entry.path())?;
+let content = fs_err::read_to_string(entry.path())?;
 
 manifest = Some(toml::de::from_str(&content)?);
 } else if file_name

@@ -259,7 +258,7 @@ pub async fn publish_package(
 return Err(Error::InvalidArchive);
 }
 
-let file = std::fs::File::open(entry.path())?;
+let file = fs_err::File::open(entry.path())?;
 
 let mut gz = flate2::read::GzEncoder::new(file, flate2::Compression::best());
 let mut bytes = vec![];
@@ -6,7 +6,7 @@ use actix_web::{
 web, App, HttpServer,
 };
 use log::info;
-use std::{env::current_dir, fs::create_dir_all, path::PathBuf, sync::Mutex};
+use std::{env::current_dir, path::PathBuf, sync::Mutex};
 
 use pesde::{
 source::{pesde::PesdePackageSource, traits::PackageSource},

@@ -86,7 +86,7 @@ async fn run() -> std::io::Result<()> {
 
 let cwd = current_dir().unwrap();
 let data_dir = cwd.join("data");
-create_dir_all(&data_dir).unwrap();
+fs_err::create_dir_all(&data_dir).unwrap();
 
 let project = Project::new(
 &cwd,
@@ -6,7 +6,6 @@ use actix_web::{
 use pesde::{names::PackageName, source::version_id::VersionId};
 use std::{
 fmt::Display,
-fs::create_dir_all,
 path::{Path, PathBuf},
 };
 

@@ -16,7 +15,7 @@ pub struct FSStorage {
 }
 
 fn read_file_to_response(path: &Path, content_type: &str) -> Result<HttpResponse, Error> {
-Ok(match std::fs::read(path) {
+Ok(match fs_err::read(path) {
 Ok(contents) => HttpResponse::Ok()
 .append_header((CONTENT_TYPE, content_type))
 .append_header((CONTENT_ENCODING, "gzip"))

@@ -41,9 +40,9 @@ impl StorageImpl for FSStorage {
 .join(name)
 .join(version.version().to_string())
 .join(version.target().to_string());
-create_dir_all(&path)?;
+fs_err::create_dir_all(&path)?;
 
-std::fs::write(path.join("pkg.tar.gz"), &contents)?;
+fs_err::write(path.join("pkg.tar.gz"), &contents)?;
 
 Ok(())
 }

@@ -79,9 +78,9 @@ impl StorageImpl for FSStorage {
 .join(name)
 .join(version.version().to_string())
 .join(version.target().to_string());
-create_dir_all(&path)?;
+fs_err::create_dir_all(&path)?;
 
-std::fs::write(path.join("readme.gz"), &contents)?;
+fs_err::write(path.join("readme.gz"), &contents)?;
 
 Ok(())
 }

@@ -105,9 +104,9 @@ impl StorageImpl for FSStorage {
 
 async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
 let path = self.root.join("Doc");
-create_dir_all(&path)?;
+fs_err::create_dir_all(&path)?;
 
-std::fs::write(path.join(format!("{doc_hash}.gz")), &contents)?;
+fs_err::write(path.join(format!("{doc_hash}.gz")), &contents)?;
 
 Ok(())
 }
@@ -31,7 +31,7 @@ impl ScriptsRepoCommand {
 config.scripts_repo = repo.clone();
 write_config(&config)?;
 
-std::fs::remove_dir_all(home_dir()?.join("scripts"))
+fs_err::remove_dir_all(home_dir()?.join("scripts"))
 .context("failed to remove scripts directory")?;
 
 println!("scripts repo set to: {repo}");
@@ -78,7 +78,7 @@ impl ExecuteCommand {
 let bin_path = target.bin_path().context("package has no binary export")?;
 
 let tmp_dir = project.cas_dir().join(".tmp");
-std::fs::create_dir_all(&tmp_dir).context("failed to create temporary directory")?;
+fs_err::create_dir_all(&tmp_dir).context("failed to create temporary directory")?;
 
 let tempdir =
 tempfile::tempdir_in(tmp_dir).context("failed to create temporary directory")?;
@@ -125,9 +125,9 @@ impl InitCommand {
 let folder = project
 .package_dir()
 .join(concat!(".", env!("CARGO_PKG_NAME")));
-std::fs::create_dir_all(&folder).context("failed to create scripts folder")?;
+fs_err::create_dir_all(&folder).context("failed to create scripts folder")?;
 
-std::fs::write(
+fs_err::write(
 folder.join(format!("{}.luau", ScriptName::RobloxSyncConfigGenerator)),
 script_contents(Path::new(&format!(
 "lune/rojo/{}.luau",

@@ -137,7 +137,7 @@ impl InitCommand {
 .context("failed to write sync config generator script file")?;
 
 #[cfg(feature = "wally-compat")]
-std::fs::write(
+fs_err::write(
 folder.join(format!("{}.luau", ScriptName::SourcemapGenerator)),
 script_contents(Path::new(&format!(
 "lune/rojo/{}.luau",
@@ -154,7 +154,7 @@ impl InstallCommand {
 if deleted_folders.insert(folder.to_string()) {
 log::debug!("deleting the {folder} folder");
 
-if let Some(e) = std::fs::remove_dir_all(project.package_dir().join(&folder))
+if let Some(e) = fs_err::remove_dir_all(project.package_dir().join(&folder))
 .err()
 .filter(|e| e.kind() != std::io::ErrorKind::NotFound)
 {

@@ -245,7 +245,7 @@ impl InstallCommand {
 }
 
 let bin_file = bin_folder.join(alias);
-std::fs::write(&bin_file, bin_link_file(alias))
+fs_err::write(&bin_file, bin_link_file(alias))
 .context("failed to write bin link file")?;
 
 make_executable(&bin_file).context("failed to make bin link executable")?;

@@ -253,7 +253,7 @@
 #[cfg(windows)]
 {
 let bin_file = bin_file.with_extension(std::env::consts::EXE_EXTENSION);
-std::fs::copy(
+fs_err::copy(
 std::env::current_exe().context("failed to get current executable path")?,
 &bin_file,
 )
@@ -45,7 +45,7 @@ impl PatchCommand {
 .join(name.escaped())
 .join(version_id.escaped())
 .join(chrono::Utc::now().timestamp().to_string());
-std::fs::create_dir_all(&directory)?;
+fs_err::create_dir_all(&directory)?;
 
 source
 .download(&node.node.pkg_ref, &project, &reqwest)?
@@ -53,10 +53,10 @@ impl PatchCommitCommand {
 .context("failed to parse manifest")?;
 
 let patch = create_patch(&self.directory).context("failed to create patch")?;
-std::fs::remove_dir_all(self.directory).context("failed to remove patch directory")?;
+fs_err::remove_dir_all(self.directory).context("failed to remove patch directory")?;
 
 let patches_dir = project.package_dir().join("patches");
-std::fs::create_dir_all(&patches_dir).context("failed to create patches directory")?;
+fs_err::create_dir_all(&patches_dir).context("failed to create patches directory")?;
 
 let patch_file_name = format!("{}-{}.patch", name.escaped(), version_id.escaped());
 

@@ -65,7 +65,7 @@ impl PatchCommitCommand {
 anyhow::bail!("patch file already exists: {}", patch_file.display());
 }
 
-std::fs::write(&patch_file, patch).context("failed to write patch file")?;
+fs_err::write(&patch_file, patch).context("failed to write patch file")?;
 
 manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
 [&name.to_string()][&version_id.to_string()] =
@@ -176,7 +176,7 @@ impl PublishCommand {
 }
 
 let contents =
-std::fs::read_to_string(&export_path).context(format!("failed to read {name}"))?;
+fs_err::read_to_string(&export_path).context(format!("failed to read {name}"))?;
 
 if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
 errs.into_iter()

@@ -235,8 +235,9 @@ impl PublishCommand {
 
 archive.append_file(
 included_name,
-&mut std::fs::File::open(&included_path)
-.context(format!("failed to read {included_name}"))?,
+fs_err::File::open(&included_path)
+.context(format!("failed to read {included_name}"))?
+.file_mut(),
 )?;
 } else {
 display_includes.push(format!("{included_name}/*"));

@@ -340,7 +341,7 @@ impl PublishCommand {
 .context("failed to get workspace directory")?,
 )
 .join(MANIFEST_FILE_NAME);
-let manifest = std::fs::read_to_string(&manifest)
+let manifest = fs_err::read_to_string(&manifest)
 .context("failed to read workspace package manifest")?;
 let manifest = toml::from_str::<pesde::manifest::Manifest>(&manifest)
 .context("failed to parse workspace package manifest")?;

@@ -489,7 +490,7 @@ impl PublishCommand {
 }
 
 if self.dry_run {
-std::fs::write("package.tar.gz", archive)?;
+fs_err::write("package.tar.gz", archive)?;
 
 println!(
 "{}",
@@ -39,7 +39,7 @@ impl Default for CliConfig {
 }
 
 pub fn read_config() -> anyhow::Result<CliConfig> {
-let config_string = match std::fs::read_to_string(home_dir()?.join("config.toml")) {
+let config_string = match fs_err::read_to_string(home_dir()?.join("config.toml")) {
 Ok(config_string) => config_string,
 Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
 return Ok(CliConfig::default());

@@ -54,7 +54,7 @@ pub fn read_config() -> anyhow::Result<CliConfig> {
 
 pub fn write_config(config: &CliConfig) -> anyhow::Result<()> {
 let config_string = toml::to_string(config).context("failed to serialize config")?;
-std::fs::write(home_dir()?.join("config.toml"), config_string)
+fs_err::write(home_dir()?.join("config.toml"), config_string)
 .context("failed to write config file")?;
 
 Ok(())
@@ -6,11 +6,11 @@ pub fn make_executable<P: AsRef<Path>>(_path: P) -> anyhow::Result<()> {
 use anyhow::Context;
 use std::os::unix::fs::PermissionsExt;
 
-let mut perms = std::fs::metadata(&_path)
+let mut perms = fs_err::metadata(&_path)
 .context("failed to get bin link file metadata")?
 .permissions();
 perms.set_mode(perms.mode() | 0o111);
-std::fs::set_permissions(&_path, perms)
+fs_err::set_permissions(&_path, perms)
 .context("failed to set bin link file permissions")?;
 }
 
@@ -10,7 +10,6 @@ use pesde::{
 use relative_path::RelativePathBuf;
 use std::{
 collections::{BTreeMap, HashSet},
-fs::create_dir_all,
 path::PathBuf,
 str::FromStr,
 sync::Arc,

@@ -35,7 +34,7 @@ pub fn home_dir() -> anyhow::Result<PathBuf> {
 
 pub fn bin_dir() -> anyhow::Result<PathBuf> {
 let bin_dir = home_dir()?.join("bin");
-create_dir_all(&bin_dir).context("failed to create bin folder")?;
+fs_err::create_dir_all(&bin_dir).context("failed to create bin folder")?;
 Ok(bin_dir)
 }
 
@@ -86,7 +86,7 @@ fn update_repo<P: AsRef<Path>>(
 .write(gix::index::write::Options::default())
 .context("failed to write index")?;
 } else {
-std::fs::create_dir_all(path).context(format!("failed to create {name} directory"))?;
+fs_err::create_dir_all(path).context(format!("failed to create {name} directory"))?;
 
 gix::prepare_clone(url, path)
 .context(format!("failed to prepare {name} repository clone"))?
@@ -3,7 +3,7 @@ use colored::Colorize;
 use reqwest::header::ACCEPT;
 use semver::Version;
 use serde::Deserialize;
-use std::{fs::create_dir_all, io::Read, path::PathBuf};
+use std::{io::Read, path::PathBuf};
 
 use crate::cli::{
 bin_dir,

@@ -157,7 +157,7 @@ pub fn get_or_download_version(
 version: &Version,
 ) -> anyhow::Result<Option<PathBuf>> {
 let path = home_dir()?.join("versions");
-create_dir_all(&path).context("failed to create versions directory")?;
+fs_err::create_dir_all(&path).context("failed to create versions directory")?;
 
 let path = path.join(format!("{version}{}", std::env::consts::EXE_SUFFIX));
 

@@ -172,11 +172,11 @@ pub fn get_or_download_version(
 }
 
 if is_requested_version {
-std::fs::copy(std::env::current_exe()?, &path)
+fs_err::copy(std::env::current_exe()?, &path)
 .context("failed to copy current executable to version directory")?;
 } else {
 let bytes = download_github_release(reqwest, version)?;
-std::fs::write(&path, bytes).context("failed to write downloaded version file")?;
+fs_err::write(&path, bytes).context("failed to write downloaded version file")?;
 }
 
 make_executable(&path).context("failed to make downloaded version executable")?;

@@ -190,9 +190,9 @@ pub fn get_or_download_version(
 
 pub fn max_installed_version() -> anyhow::Result<Version> {
 let versions_dir = home_dir()?.join("versions");
-create_dir_all(&versions_dir).context("failed to create versions directory")?;
+fs_err::create_dir_all(&versions_dir).context("failed to create versions directory")?;
 
-let max_version = std::fs::read_dir(versions_dir)
+let max_version = fs_err::read_dir(versions_dir)
 .context("failed to read versions directory")?
 .collect::<Result<Vec<_>, _>>()?
 .into_iter()

@@ -228,7 +228,7 @@ pub fn update_bin_exe() -> anyhow::Result<()> {
 std::env::consts::EXE_SUFFIX
 ));
 
-std::fs::copy(std::env::current_exe()?, &copy_to)
+fs_err::copy(std::env::current_exe()?, &copy_to)
 .context("failed to copy executable to bin folder")?;
 
 make_executable(&copy_to)
@@ -7,9 +7,9 @@ use crate::{
 },
 Project, PACKAGES_CONTAINER_NAME,
 };
+use fs_err::create_dir_all;
 use std::{
 collections::HashSet,
-fs::create_dir_all,
 sync::{mpsc::Receiver, Arc, Mutex},
 };
 

@@ -133,7 +133,7 @@ pub mod errors {
 RefreshFailed(#[from] Box<crate::source::errors::RefreshError>),
 
 /// Error interacting with the filesystem
-#[error("error interacting with filesystem")]
+#[error("error interacting with the filesystem")]
 Io(#[from] std::io::Error),
 
 /// Error downloading a package
src/lib.rs (22 lines changed)
@@ -138,31 +138,31 @@ impl Project {
 
 /// Read the manifest file
 pub fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
-let string = std::fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
+let string = fs_err::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
 Ok(string)
 }
 
 /// Deserialize the manifest file
 pub fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
-let string = std::fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
+let string = fs_err::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
 Ok(toml::from_str(&string)?)
 }
 
 /// Write the manifest file
 pub fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
-std::fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref())
+fs_err::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref())
 }
 
 /// Deserialize the lockfile
 pub fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
-let string = std::fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME))?;
+let string = fs_err::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME))?;
 Ok(toml::from_str(&string)?)
 }
 
 /// Write the lockfile
 pub fn write_lockfile(&self, lockfile: Lockfile) -> Result<(), errors::LockfileWriteError> {
 let string = toml::to_string(&lockfile)?;
-std::fs::write(self.package_dir.join(LOCKFILE_FILE_NAME), string)?;
+fs_err::write(self.package_dir.join(LOCKFILE_FILE_NAME), string)?;
 Ok(())
 }
 

@@ -172,8 +172,8 @@ impl Project {
 dir: P,
 ) -> Result<HashMap<PathBuf, Manifest>, errors::WorkspaceMembersError> {
 let dir = dir.as_ref().to_path_buf();
-let manifest = std::fs::read_to_string(dir.join(MANIFEST_FILE_NAME))
-.map_err(|e| errors::WorkspaceMembersError::ManifestMissing(dir.to_path_buf(), e))?;
+let manifest = fs_err::read_to_string(dir.join(MANIFEST_FILE_NAME))
+.map_err(errors::WorkspaceMembersError::ManifestMissing)?;
 let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
 errors::WorkspaceMembersError::ManifestDeser(dir.to_path_buf(), Box::new(e))
 })?;

@@ -191,8 +191,8 @@ impl Project {
 members
 .into_iter()
 .map(|path| {
-let manifest = std::fs::read_to_string(path.join(MANIFEST_FILE_NAME))
-.map_err(|e| errors::WorkspaceMembersError::ManifestMissing(path.clone(), e))?;
+let manifest = fs_err::read_to_string(path.join(MANIFEST_FILE_NAME))
+.map_err(errors::WorkspaceMembersError::ManifestMissing)?;
 let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
 errors::WorkspaceMembersError::ManifestDeser(path.clone(), Box::new(e))
 })?;

@@ -251,8 +251,8 @@ pub mod errors {
 #[non_exhaustive]
 pub enum WorkspaceMembersError {
 /// The manifest file could not be found
-#[error("missing manifest file at {0}")]
-ManifestMissing(PathBuf, #[source] std::io::Error),
+#[error("missing manifest file")]
+ManifestMissing(#[source] std::io::Error),
 
 /// An error occurred deserializing the manifest file
 #[error("error deserializing manifest file at {0}")]
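The src/lib.rs hunks above also slim down WorkspaceMembersError: the ManifestMissing variant drops its PathBuf field and the "at {0}" suffix, presumably because the fs-err error stored as #[source] already names the file it failed on. A reduced sketch of the new shape and how it gets constructed (the manifest file name here is an assumption, standing in for MANIFEST_FILE_NAME):

    use thiserror::Error;

    #[derive(Debug, Error)]
    pub enum WorkspaceMembersError {
        /// The manifest file could not be found
        #[error("missing manifest file")]
        ManifestMissing(#[source] std::io::Error),
    }

    fn read_member_manifest(dir: &std::path::Path) -> Result<String, WorkspaceMembersError> {
        // fs_err's io::Error already carries the path, so the variant no longer needs one.
        fs_err::read_to_string(dir.join("pesde.toml"))
            .map_err(WorkspaceMembersError::ManifestMissing)
    }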
@@ -9,7 +9,6 @@ use crate::{
 use std::{
 collections::BTreeMap,
 ffi::OsStr,
-fs::create_dir_all,
 path::{Path, PathBuf},
 };
 

@@ -18,14 +17,14 @@ pub mod generator;
 
 fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
 let p = path.as_ref();
-create_dir_all(p)?;
+fs_err::create_dir_all(p)?;
 p.canonicalize()
 }
 
 fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
 let cas_path = store_in_cas(cas_dir, contents.as_bytes())?.1;
 
-std::fs::hard_link(cas_path, destination)
+fs_err::hard_link(cas_path, destination)
 }
 
 impl Project {

@@ -58,7 +57,7 @@ impl Project {
 let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
 let lib_file = lib_file.to_path(&container_folder);
 
-let contents = match std::fs::read_to_string(&lib_file) {
+let contents = match fs_err::read_to_string(&lib_file) {
 Ok(contents) => contents,
 Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
 return Err(errors::LinkingError::LibFileNotFound(
@@ -11,7 +11,6 @@ use indicatif_log_bridge::LogWrapper;
 use pesde::{AuthConfig, Project, MANIFEST_FILE_NAME};
 use std::{
 collections::HashSet,
-fs::{create_dir_all, hard_link, remove_file},
 path::{Path, PathBuf},
 thread::spawn,
 };

@@ -45,8 +44,8 @@ fn get_linkable_dir(path: &Path) -> PathBuf {
 
 let try_path = curr_path.join(temp_file_name);
 
-if hard_link(file_to_try.path(), &try_path).is_ok() {
-if let Err(err) = remove_file(&try_path) {
+if fs_err::hard_link(file_to_try.path(), &try_path).is_ok() {
+if let Err(err) = fs_err::remove_file(&try_path) {
 log::warn!(
 "failed to remove temporary file at {}: {err}",
 try_path.display()

@@ -109,7 +108,7 @@ fn run() -> anyhow::Result<()> {
 let mut workspace_dir = None::<PathBuf>;
 
 fn get_workspace_members(path: &Path) -> anyhow::Result<HashSet<PathBuf>> {
-let manifest = std::fs::read_to_string(path.join(MANIFEST_FILE_NAME))
+let manifest = fs_err::read_to_string(path.join(MANIFEST_FILE_NAME))
 .context("failed to read manifest")?;
 let manifest: pesde::manifest::Manifest =
 toml::from_str(&manifest).context("failed to parse manifest")?;

@@ -180,7 +179,7 @@ fn run() -> anyhow::Result<()> {
 
 let home_dir = home_dir()?;
 let data_dir = home_dir.join("data");
-create_dir_all(&data_dir).expect("failed to create data directory");
+fs_err::create_dir_all(&data_dir).expect("failed to create data directory");
 
 let cas_dir = get_linkable_dir(&project_root_dir).join(HOME_DIR);
 
@@ -2,9 +2,10 @@ use crate::{
 lockfile::DownloadedGraph, source::traits::PackageRef, Project, MANIFEST_FILE_NAME,
 PACKAGES_CONTAINER_NAME,
 };
+use fs_err::read;
 use git2::{ApplyLocation, ApplyOptions, Diff, DiffFormat, DiffLineType, Repository, Signature};
 use relative_path::RelativePathBuf;
-use std::{fs::read, path::Path};
+use std::path::Path;
 
 /// Set up a git repository for patches
 pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> {

@@ -77,9 +78,9 @@ impl Project {
 for (name, versions) in manifest.patches {
 for (version_id, patch_path) in versions {
 let patch_path = patch_path.to_path(self.package_dir());
-let patch = Diff::from_buffer(&read(&patch_path).map_err(|e| {
-errors::ApplyPatchesError::PatchReadError(patch_path.clone(), e)
-})?)?;
+let patch = Diff::from_buffer(
+&read(&patch_path).map_err(errors::ApplyPatchesError::PatchRead)?,
+)?;
 
 let Some(node) = graph
 .get(&name)

@@ -134,8 +135,8 @@ impl Project {
 // there is no way (as far as I know) to check if it's hardlinked
 // so, we always unlink it
 let content = read(&path).unwrap();
-std::fs::remove_file(&path).unwrap();
-std::fs::write(path, content).unwrap();
+fs_err::remove_file(&path).unwrap();
+fs_err::write(path, content).unwrap();
 
 true
 });

@@ -144,9 +145,8 @@ impl Project {
 
 log::debug!("patch applied to {name}@{version_id}, removing .git directory");
 
-std::fs::remove_dir_all(container_folder.join(".git")).map_err(|e| {
-errors::ApplyPatchesError::GitDirectoryRemovalError(container_folder, e)
-})?;
+fs_err::remove_dir_all(container_folder.join(".git"))
+.map_err(errors::ApplyPatchesError::DotGitRemove)?;
 }
 }
 

@@ -156,8 +156,6 @@ impl Project {
 
 /// Errors that can occur when using patches
 pub mod errors {
-use std::path::PathBuf;
-
 use thiserror::Error;
 
 /// Errors that can occur when applying patches

@@ -170,14 +168,14 @@ pub mod errors {
 
 /// Error interacting with git
 #[error("error interacting with git")]
-GitError(#[from] git2::Error),
+Git(#[from] git2::Error),
 
 /// Error reading the patch file
-#[error("error reading patch file at {0}")]
-PatchReadError(PathBuf, #[source] std::io::Error),
+#[error("error reading patch file")]
+PatchRead(#[source] std::io::Error),
 
 /// Error removing the .git directory
 #[error("error removing .git directory")]
-GitDirectoryRemovalError(PathBuf, #[source] std::io::Error),
+DotGitRemove(#[source] std::io::Error),
 }
 }
@@ -35,7 +35,7 @@ pub enum PackageFS {
 Copy(PathBuf, TargetKind),
 }
 
-fn make_readonly(_file: &std::fs::File) -> std::io::Result<()> {
+fn make_readonly(_file: &fs_err::File) -> std::io::Result<()> {
 // on Windows, file deletion is disallowed if the file is read-only which breaks patching
 #[cfg(not(windows))]
 {

@@ -56,11 +56,11 @@ pub(crate) fn store_in_cas<P: AsRef<Path>>(
 let (prefix, rest) = hash.split_at(2);
 
 let folder = cas_dir.as_ref().join(prefix);
-std::fs::create_dir_all(&folder)?;
+fs_err::create_dir_all(&folder)?;
 
 let cas_path = folder.join(rest);
 if !cas_path.exists() {
-let mut file = std::fs::File::create(&cas_path)?;
+let mut file = fs_err::File::create(&cas_path)?;
 file.write_all(contents)?;
 
 make_readonly(&file)?;

@@ -74,7 +74,7 @@ pub(crate) fn store_reader_in_cas<P: AsRef<Path>>(
 contents: &mut dyn Read,
 ) -> std::io::Result<String> {
 let tmp_dir = cas_dir.as_ref().join(".tmp");
-std::fs::create_dir_all(&tmp_dir)?;
+fs_err::create_dir_all(&tmp_dir)?;
 let mut hasher = Sha256::new();
 let mut buf = [0; 8 * 1024];
 let mut file_writer = BufWriter::new(tempfile::NamedTempFile::new_in(&tmp_dir)?);

@@ -94,12 +94,12 @@ pub(crate) fn store_reader_in_cas<P: AsRef<Path>>(
 let (prefix, rest) = hash.split_at(2);
 
 let folder = cas_dir.as_ref().join(prefix);
-std::fs::create_dir_all(&folder)?;
+fs_err::create_dir_all(&folder)?;
 
 let cas_path = folder.join(rest);
-match file_writer.into_inner()?.persist_noclobber(cas_path) {
-Ok(f) => {
-make_readonly(&f)?;
+match file_writer.into_inner()?.persist_noclobber(&cas_path) {
+Ok(_) => {
+make_readonly(&fs_err::File::open(cas_path)?)?;
 }
 Err(e) if e.error.kind() == std::io::ErrorKind::AlreadyExists => {}
 Err(e) => return Err(e.error),

@@ -113,8 +113,8 @@ fn copy_dir_all(
 dst: impl AsRef<Path>,
 target: TargetKind,
 ) -> std::io::Result<()> {
-std::fs::create_dir_all(&dst)?;
-'outer: for entry in std::fs::read_dir(src)? {
+fs_err::create_dir_all(&dst)?;
+'outer: for entry in fs_err::read_dir(src.as_ref().to_path_buf())? {
 let entry = entry?;
 let ty = entry.file_type()?;
 let file_name = entry.file_name().to_string_lossy().to_string();

@@ -136,7 +136,7 @@ fn copy_dir_all(
 continue;
 }
 
-std::fs::copy(entry.path(), dst.as_ref().join(file_name))?;
+fs_err::copy(entry.path(), dst.as_ref().join(file_name))?;
 }
 }
 Ok(())

@@ -158,17 +158,17 @@ impl PackageFS {
 match entry {
 FSEntry::File(hash) => {
 if let Some(parent) = path.parent() {
-std::fs::create_dir_all(parent)?;
+fs_err::create_dir_all(parent)?;
 }
 
 let (prefix, rest) = hash.split_at(2);
 let cas_file_path = cas_path.as_ref().join(prefix).join(rest);
 
 if link {
-std::fs::hard_link(cas_file_path, path)?;
+fs_err::hard_link(cas_file_path, path)?;
 } else {
-let mut f = std::fs::File::create(&path)?;
-f.write_all(&std::fs::read(cas_file_path)?)?;
+let mut f = fs_err::File::create(&path)?;
+f.write_all(&fs_err::read(cas_file_path)?)?;
 
 #[cfg(unix)]
 {

@@ -180,7 +180,7 @@ impl PackageFS {
 }
 }
 FSEntry::Directory => {
-std::fs::create_dir_all(path)?;
+fs_err::create_dir_all(path)?;
 }
 }
 }

@@ -205,6 +205,6 @@ impl PackageFS {
 
 let (prefix, rest) = file_hash.as_ref().split_at(2);
 let cas_file_path = cas_path.as_ref().join(prefix).join(rest);
-std::fs::read_to_string(cas_file_path).ok()
+fs_err::read_to_string(cas_file_path).ok()
 }
 }
@@ -335,7 +335,7 @@ impl PackageSource for GitPackageSource {
 .join(hash(self.as_bytes()))
 .join(&pkg_ref.tree_id);
 
-match std::fs::read_to_string(&index_file) {
+match fs_err::read_to_string(&index_file) {
 Ok(s) => {
 log::debug!(
 "using cached index file for package {}#{} {}",

@@ -490,10 +490,10 @@ impl PackageSource for GitPackageSource {
 };
 
 if let Some(parent) = index_file.parent() {
-std::fs::create_dir_all(parent)?;
+fs_err::create_dir_all(parent)?;
 }
 
-std::fs::write(
+fs_err::write(
 &index_file,
 toml::to_string(&fs).map_err(|e| {
 errors::DownloadError::SerializeIndex(Box::new(self.repo_url.clone()), e)
@@ -140,7 +140,7 @@ pub trait GitBasedSource {
 return Ok(());
 }
 
-std::fs::create_dir_all(&path)?;
+fs_err::create_dir_all(&path)?;
 
 let auth_config = project.auth_config.clone();
 
@@ -253,7 +253,7 @@ impl PackageSource for PesdePackageSource {
 .join(pkg_ref.version.to_string())
 .join(pkg_ref.target.to_string());
 
-match std::fs::read_to_string(&index_file) {
+match fs_err::read_to_string(&index_file) {
 Ok(s) => {
 log::debug!(
 "using cached index file for package {}@{} {}",

@@ -320,10 +320,10 @@ impl PackageSource for PesdePackageSource {
 let fs = PackageFS::CAS(entries);
 
 if let Some(parent) = index_file.parent() {
-std::fs::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
+fs_err::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
 }
 
-std::fs::write(&index_file, toml::to_string(&fs)?)
+fs_err::write(&index_file, toml::to_string(&fs)?)
 .map_err(errors::DownloadError::WriteIndex)?;
 
 Ok((fs, pkg_ref.target.clone()))
@@ -62,7 +62,7 @@ pub(crate) fn get_target(
 let build_files = Default::default();
 
 let manifest = tempdir.path().join(WALLY_MANIFEST_FILE_NAME);
-let manifest = std::fs::read_to_string(&manifest)?;
+let manifest = fs_err::read_to_string(&manifest)?;
 let manifest: WallyManifest = toml::from_str(&manifest)?;
 
 Ok(if matches!(manifest.package.realm, Realm::Shared) {

@@ -151,7 +151,7 @@ impl PackageSource for WallyPackageSource {
 .join(pkg_ref.name.escaped())
 .join(pkg_ref.version.to_string());
 
-let tempdir = match std::fs::read_to_string(&index_file) {
+let tempdir = match fs_err::read_to_string(&index_file) {
 Ok(s) => {
 log::debug!(
 "using cached index file for package {}@{}",

@@ -198,7 +198,7 @@ impl PackageSource for WallyPackageSource {
 
 let mut entries = BTreeMap::new();
 
-let mut dir_entries = std::fs::read_dir(tempdir.path())?.collect::<VecDeque<_>>();
+let mut dir_entries = fs_err::read_dir(tempdir.path())?.collect::<VecDeque<_>>();
 while let Some(entry) = dir_entries.pop_front() {
 let entry = entry?;
 let path =

@@ -210,7 +210,7 @@ impl PackageSource for WallyPackageSource {
 }
 
 entries.insert(path, FSEntry::Directory);
-dir_entries.extend(std::fs::read_dir(entry.path())?);
+dir_entries.extend(fs_err::read_dir(entry.path())?);
 
 continue;
 }

@@ -219,7 +219,7 @@ impl PackageSource for WallyPackageSource {
 continue;
 }
 
-let mut file = std::fs::File::open(entry.path())?;
+let mut file = fs_err::File::open(entry.path())?;
 let hash = store_reader_in_cas(project.cas_dir(), &mut file)?;
 entries.insert(path, FSEntry::File(hash));
 }

@@ -227,10 +227,10 @@ impl PackageSource for WallyPackageSource {
 let fs = PackageFS::CAS(entries);
 
 if let Some(parent) = index_file.parent() {
-std::fs::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
+fs_err::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
 }
 
-std::fs::write(&index_file, toml::to_string(&fs)?)
+fs_err::write(&index_file, toml::to_string(&fs)?)
 .map_err(errors::DownloadError::WriteIndex)?;
 
 Ok((fs, get_target(project, &tempdir)?))