Mirror of https://github.com/pesde-pkg/pesde.git (synced 2025-04-05 19:30:57 +01:00)
refactor: patch improvements
Patches now work much better with incremental installs. A Git repository is used to reset a patched package back to its original state before the patch is applied again, and patches are now properly cleaned up after being removed. To allow more flexibility, patching is now done before type extraction, so that types can be patched too. Lastly, the patch-commit command no longer deletes the patch directory until everything else has succeeded, to prevent data loss.
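For context, here is a minimal sketch of the reset-then-apply idea the message describes, using git2. The helper name and signature are illustrative only, not pesde's actual API; the real implementation is in src/patches.rs below.

```rust
use git2::{build::CheckoutBuilder, ApplyLocation, Diff, Repository};
use std::path::Path;

// Hypothetical helper: before a patch is (re)applied during an incremental
// install, reset the package's working tree to the committed baseline so any
// previously patched state is discarded first.
fn reset_and_apply(package_dir: &Path, patch_bytes: &[u8]) -> Result<(), git2::Error> {
    let repo = Repository::open(package_dir)?;

    // Force-checkout HEAD and drop untracked files, so output from an earlier
    // patch application does not leak into the next one.
    let mut checkout = CheckoutBuilder::new();
    checkout.force();
    checkout.remove_untracked(true);
    repo.checkout_head(Some(&mut checkout))?;

    // Re-apply the stored patch against the clean working tree.
    let diff = Diff::from_buffer(patch_bytes)?;
    repo.apply(&diff, ApplyLocation::WorkDir, None)
}
```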
parent 8bb8888de8
commit 1d131e98c6
6 changed files with 245 additions and 160 deletions
@@ -8,6 +8,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
 ### Fixed
 - Correct script linker require paths on Windows by @daimond113
+- Improve patches in incremental installs by @daimond113
+
+### Changed
+- Patches are now applied before type extraction to allow patches to modify types by @daimond113
 
 ## [0.6.0-rc.4] - 2025-02-08
 ### Fixed
@@ -59,9 +59,6 @@ impl PatchCommitCommand {
 .context("failed to parse manifest")?;
 
 let patch = create_patch(&self.directory).context("failed to create patch")?;
-fs::remove_dir_all(self.directory)
-.await
-.context("failed to remove patch directory")?;
 
 let patches_dir = project.package_dir().join("patches");
 fs::create_dir_all(&patches_dir)
@@ -75,9 +72,6 @@ impl PatchCommitCommand {
 );
 
 let patch_file = patches_dir.join(&patch_file_name);
-if patch_file.exists() {
-anyhow::bail!("patch file already exists: {}", patch_file.display());
-}
 
 fs::write(&patch_file, patch)
 .await
@@ -92,6 +86,10 @@ impl PatchCommitCommand {
 .await
 .context("failed to write manifest")?;
 
+fs::remove_dir_all(self.directory)
+.await
+.context("failed to remove patch directory")?;
+
 println!(concat!(
 "done! run `",
 env!("CARGO_BIN_NAME"),
@@ -300,12 +300,7 @@ pub async fn install(
 .download_and_link(
 &graph,
 DownloadAndLinkOptions::<CliReporter, InstallHooks>::new(reqwest.clone())
-.reporter(
-#[cfg(feature = "patches")]
-reporter.clone(),
-#[cfg(not(feature = "patches"))]
-reporter,
-)
+.reporter(reporter)
 .hooks(hooks)
 .refreshed_sources(refreshed_sources.clone())
 .prod(options.prod)
@@ -315,18 +310,6 @@ pub async fn install(
 .await
 .context("failed to download and link dependencies")?;
 
-#[cfg(feature = "patches")]
-{
-use pesde::graph::ConvertableGraph;
-root_progress.reset();
-root_progress.set_length(0);
-root_progress.set_message("patch");
-
-project
-.apply_patches(&downloaded_graph.clone().convert(), reporter)
-.await?;
-}
-
 #[cfg(feature = "version-management")]
 {
 let mut tasks = manifest
@@ -6,7 +6,7 @@ use crate::{
 DependencyGraphWithTarget,
 },
 manifest::{target::TargetKind, DependencyType},
-reporters::DownloadsReporter,
+reporters::{DownloadsReporter, PatchesReporter},
 source::{
 ids::PackageId,
 traits::{GetTargetOptions, PackageRef, PackageSource},
@@ -85,7 +85,7 @@ pub struct DownloadAndLinkOptions<Reporter = (), Hooks = ()> {
 
 impl<Reporter, Hooks> DownloadAndLinkOptions<Reporter, Hooks>
 where
-Reporter: DownloadsReporter + Send + Sync + 'static,
+Reporter: DownloadsReporter + PatchesReporter + Send + Sync + 'static,
 Hooks: DownloadAndLinkHooks + Send + Sync + 'static,
 {
 /// Creates a new download options with the given reqwest client and reporter.
@@ -161,7 +161,7 @@ impl Project {
 options: DownloadAndLinkOptions<Reporter, Hooks>,
 ) -> Result<DependencyGraphWithTarget, errors::DownloadAndLinkError<Hooks::Error>>
 where
-Reporter: DownloadsReporter + 'static,
+Reporter: DownloadsReporter + PatchesReporter + 'static,
 Hooks: DownloadAndLinkHooks + 'static,
 {
 let DownloadAndLinkOptions {
@@ -203,13 +203,13 @@ impl Project {
 }
 
 // step 1. download dependencies
-let downloaded_graph = {
+let graph_to_download = {
 let mut download_graph_options = DownloadGraphOptions::<Reporter>::new(reqwest.clone())
 .refreshed_sources(refreshed_sources.clone())
 .network_concurrency(network_concurrency);
 
-if let Some(reporter) = reporter {
-download_graph_options = download_graph_options.reporter(reporter.clone());
+if let Some(reporter) = reporter.clone() {
+download_graph_options = download_graph_options.reporter(reporter);
 }
 
 let mut downloaded_graph = DependencyGraph::new();
@@ -273,9 +273,10 @@ impl Project {
 downloaded_graph
 };
 
-let (downloaded_wally_graph, downloaded_other_graph) = downloaded_graph
-.into_iter()
-.partition::<HashMap<_, _>, _>(|(_, node)| node.pkg_ref.is_wally_package());
+let (wally_graph_to_download, other_graph_to_download) =
+graph_to_download
+.into_iter()
+.partition::<HashMap<_, _>, _>(|(_, node)| node.pkg_ref.is_wally_package());
 
 let mut graph = Arc::new(DependencyGraphWithTarget::new());
 
@@ -329,7 +330,7 @@ impl Project {
 &mut graph,
 self,
 manifest.target.kind(),
-downloaded_other_graph,
+other_graph_to_download,
 )
 .instrument(tracing::debug_span!("get targets (non-wally)"))
 .await?;
@@ -355,11 +356,49 @@ impl Project {
 &mut graph,
 self,
 manifest.target.kind(),
-downloaded_wally_graph,
+wally_graph_to_download,
 )
 .instrument(tracing::debug_span!("get targets (wally)"))
 .await?;
 
+#[cfg(feature = "patches")]
+{
+use crate::patches::apply_patch;
+let mut tasks = manifest
+.patches
+.iter()
+.flat_map(|(name, versions)| {
+versions
+.iter()
+.map(|(v_id, path)| (PackageId::new(name.clone(), v_id.clone()), path))
+})
+.filter_map(|(id, patch_path)| graph.get(&id).map(|node| (id, node, patch_path)))
+.map(|(id, node, patch_path)| {
+let patch_path = patch_path.to_path(self.package_dir());
+let container_folder =
+node.node
+.container_folder_from_project(&id, self, manifest.target.kind());
+let reporter = reporter.clone();
+
+async move {
+match reporter {
+Some(reporter) => {
+apply_patch(&id, container_folder, &patch_path, reporter.clone())
+.await
+}
+None => {
+apply_patch(&id, container_folder, &patch_path, Arc::new(())).await
+}
+}
+}
+})
+.collect::<JoinSet<_>>();
+
+while let Some(task) = tasks.join_next().await {
+task.unwrap()?;
+}
+}
+
 // step 4. link ALL dependencies. do so with types
 self.link_dependencies(graph.clone(), true)
 .instrument(tracing::debug_span!("link (all)"))
@@ -421,5 +460,10 @@ pub mod errors {
 /// Removing unused dependencies failed
 #[error("error removing unused dependencies")]
 RemoveUnused(#[from] crate::linking::incremental::errors::RemoveUnusedError),
+
+/// Patching a package failed
+#[cfg(feature = "patches")]
+#[error("error applying patch")]
+Patch(#[from] crate::patches::errors::ApplyPatchError),
 }
 }
@@ -1,12 +1,13 @@
 use crate::{
-all_packages_dirs, graph::DependencyGraphWithTarget, manifest::Alias, util::remove_empty_dir,
-Project, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
+all_packages_dirs, graph::DependencyGraphWithTarget, manifest::Alias, patches::remove_patch,
+source::ids::PackageId, util::remove_empty_dir, Project, PACKAGES_CONTAINER_NAME,
+SCRIPTS_LINK_FOLDER,
 };
 use fs_err::tokio as fs;
 use futures::FutureExt;
 use std::{
 collections::HashSet,
-path::{Path, PathBuf},
+path::{Component, Path, PathBuf},
 sync::Arc,
 };
 use tokio::task::JoinSet;
@@ -14,19 +15,33 @@ use tokio::task::JoinSet;
 fn index_entry(
 entry: fs::DirEntry,
 packages_index_dir: &Path,
-tasks: &mut JoinSet<std::io::Result<()>>,
+tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
 used_paths: &Arc<HashSet<PathBuf>>,
+patched_packages: &Arc<HashSet<PathBuf>>,
 ) {
+fn get_package_name_from_container(container: &Path) -> (bool, String) {
+let Component::Normal(first_component) = container.components().next().unwrap() else {
+panic!("invalid container path: `{}`", container.display());
+};
+
+let first_component = first_component.to_string_lossy();
+let Some((name, _)) = first_component.split_once('@') else {
+return (
+false,
+first_component.split_once('+').unwrap().1.to_string(),
+);
+};
+
+(true, name.split_once('_').unwrap().1.to_string())
+}
+
 let path = entry.path();
 let path_relative = path.strip_prefix(packages_index_dir).unwrap().to_path_buf();
 
-let is_wally = entry
-.file_name()
-.to_str()
-.expect("non UTF-8 folder name in packages index")
-.contains("@");
+let (is_wally, package_name) = get_package_name_from_container(&path_relative);
 
 let used_paths = used_paths.clone();
+let patched_packages = patched_packages.clone();
 tasks.spawn(async move {
 if is_wally {
 #[cfg(not(feature = "wally-compat"))]
@@ -40,13 +55,15 @@ fn index_entry(
 {
 if !used_paths.contains(&path_relative) {
 fs::remove_dir_all(path).await?;
+} else if !patched_packages.contains(&path_relative) {
+remove_patch(path.join(package_name)).await?;
 }
 
 return Ok(());
 }
 }
 
-let mut tasks = JoinSet::new();
+let mut tasks = JoinSet::<Result<_, errors::RemoveUnusedError>>::new();
 
 let mut entries = fs::read_dir(&path).await?;
 while let Some(entry) = entries.next_entry().await? {
@@ -54,24 +71,28 @@ fn index_entry(
 let path_relative = path_relative.join(&version);
 
 if used_paths.contains(&path_relative) {
+if !patched_packages.contains(&path_relative) {
+let path = entry.path().join(&package_name);
+tasks.spawn(async { remove_patch(path).await.map_err(Into::into) });
+}
 continue;
 }
 
 let path = entry.path();
-tasks.spawn(async { fs::remove_dir_all(path).await });
+tasks.spawn(async { fs::remove_dir_all(path).await.map_err(Into::into) });
 }
 
 while let Some(task) = tasks.join_next().await {
 task.unwrap()?;
 }
 
-remove_empty_dir(&path).await
+remove_empty_dir(&path).await.map_err(Into::into)
 });
 }
 
 fn packages_entry(
 entry: fs::DirEntry,
-tasks: &mut JoinSet<std::io::Result<()>>,
+tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
 expected_aliases: &Arc<HashSet<Alias>>,
 ) {
 let expected_aliases = expected_aliases.clone();
@@ -105,7 +126,7 @@ fn packages_entry(
 
 fn scripts_entry(
 entry: fs::DirEntry,
-tasks: &mut JoinSet<std::io::Result<()>>,
+tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
 expected_aliases: &Arc<HashSet<Alias>>,
 ) {
 let expected_aliases = expected_aliases.clone();
@@ -154,6 +175,24 @@ impl Project {
 })
 .collect::<HashSet<_>>();
 let used_paths = Arc::new(used_paths);
+let patched_packages = manifest
+.patches
+.iter()
+.flat_map(|(name, versions)| {
+versions
+.iter()
+.map(|(v_id, _)| PackageId::new(name.clone(), v_id.clone()))
+})
+.filter_map(|id| graph.get(&id).map(|node| (id, node)))
+.map(|(id, node)| {
+node.node
+.container_folder(&id)
+.parent()
+.unwrap()
+.to_path_buf()
+})
+.collect::<HashSet<_>>();
+let patched_packages = Arc::new(dbg!(patched_packages));
 
 let mut tasks = all_packages_dirs()
 .into_iter()
@@ -161,6 +200,7 @@ impl Project {
 let packages_dir = self.package_dir().join(&folder);
 let packages_index_dir = packages_dir.join(PACKAGES_CONTAINER_NAME);
 let used_paths = used_paths.clone();
+let patched_packages = patched_packages.clone();
 
 let expected_aliases = graph
 .iter()
@@ -181,7 +221,7 @@ impl Project {
 let mut index_entries = match fs::read_dir(&packages_index_dir).await {
 Ok(entries) => entries,
 Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(()),
-Err(e) => return Err(e),
+Err(e) => return Err(e.into()),
 };
 // we don't handle NotFound here because the upper level will handle it
 let mut packages_entries = fs::read_dir(&packages_dir).await?;
@@ -195,6 +235,7 @@ impl Project {
 &packages_index_dir,
 &mut tasks,
 &used_paths,
+&patched_packages,
 );
 }
 Some(entry) = packages_entries.next_entry().map(Result::transpose) => {
@@ -215,7 +256,7 @@ impl Project {
 remove_empty_dir(&packages_index_dir).await?;
 remove_empty_dir(&packages_dir).await?;
 
-Ok::<_, std::io::Error>(())
+Ok::<_, errors::RemoveUnusedError>(())
 }
 })
 .collect::<JoinSet<_>>();
@@ -267,5 +308,10 @@ pub mod errors {
 /// IO error
 #[error("IO error")]
 Io(#[from] std::io::Error),
+
+/// Removing a patch failed
+#[cfg(feature = "patches")]
+#[error("error removing patch")]
+PatchRemove(#[from] crate::patches::errors::ApplyPatchError),
 }
 }
228 src/patches.rs
@@ -1,16 +1,17 @@
 use crate::{
-graph::DependencyGraph,
 reporters::{PatchProgressReporter, PatchesReporter},
 source::ids::PackageId,
-Project, MANIFEST_FILE_NAME,
+MANIFEST_FILE_NAME,
 };
 use fs_err::tokio as fs;
 use futures::TryFutureExt;
 use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature};
-use relative_path::RelativePathBuf;
-use std::{path::Path, sync::Arc};
-use tokio::task::JoinSet;
-use tracing::{instrument, Instrument};
+use std::{
+path::{Path, PathBuf},
+sync::Arc,
+};
+use tokio::task::{spawn_blocking, JoinSet};
+use tracing::instrument;
 
 /// Set up a git repository for patches
 pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> {
@@ -43,7 +44,7 @@ pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Er
 
 /// Create a patch from the current state of the repository
 pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {
-let mut patches = vec![];
+let mut patch = vec![];
 let repo = Repository::open(dir.as_ref())?;
 
 let original = repo.head()?.peel_to_tree()?;
@@ -54,7 +55,12 @@ pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {
 checkout_builder.path(MANIFEST_FILE_NAME);
 repo.checkout_tree(original.as_object(), Some(&mut checkout_builder))?;
 
-let diff = repo.diff_tree_to_workdir(Some(&original), None)?;
+// TODO: despite all the options, this still doesn't include untracked files
+let mut diff_options = git2::DiffOptions::default();
+diff_options.include_untracked(true);
+diff_options.recurse_untracked_dirs(true);
+
+let diff = repo.diff_tree_to_workdir(Some(&original), Some(&mut diff_options))?;
 
 diff.print(DiffFormat::Patch, |_delta, _hunk, line| {
 if matches!(
@@ -64,119 +70,127 @@ pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {
 let origin = line.origin();
 let mut buffer = vec![0; origin.len_utf8()];
 origin.encode_utf8(&mut buffer);
-patches.extend(buffer);
+patch.extend(buffer);
 }
 
-patches.extend(line.content());
+patch.extend(line.content());
 
 true
 })?;
 
-Ok(patches)
+Ok(patch)
 }
 
-impl Project {
-/// Apply patches to the project's dependencies
-#[instrument(skip(self, graph, reporter), level = "debug")]
-pub async fn apply_patches<Reporter>(
-&self,
-graph: &DependencyGraph,
-reporter: Arc<Reporter>,
-) -> Result<(), errors::ApplyPatchesError>
-where
-Reporter: PatchesReporter + Send + Sync + 'static,
-{
-let manifest = self.deser_manifest().await?;
+// unlike a simple hard reset, this will also remove untracked files
+fn reset_repo(repo: &Repository) -> Result<(), git2::Error> {
+let mut checkout_builder = git2::build::CheckoutBuilder::new();
+checkout_builder.force();
+checkout_builder.remove_untracked(true);
+repo.checkout_head(Some(&mut checkout_builder))?;
 
-let mut tasks = manifest
-.patches
-.into_iter()
-.flat_map(|(name, versions)| {
-versions.into_iter().map(move |(version_id, patch_path)| {
-(PackageId::new(name.clone(), version_id), patch_path)
-})
-})
-.filter_map(|(package_id, patch_path)| match graph.get(&package_id) {
-Some(node) => Some((package_id, patch_path, node)),
-None => {
-tracing::warn!(
-"patch for {package_id} not applied because it is not in the graph"
-);
-None
-}
-})
-.map(|(package_id, patch_path, node)| {
-let patch_path = patch_path.to_path(self.package_dir());
-let span = tracing::info_span!("apply patch", package_id = package_id.to_string());
-let container_folder =
-node.container_folder_from_project(&package_id, self, manifest.target.kind());
-let reporter = reporter.clone();
+Ok(())
+}
 
-async move {
-tracing::debug!("applying patch");
+/// Apply a patch to a dependency
+#[instrument(skip(container_folder, patch_path, reporter), level = "debug")]
+pub async fn apply_patch<Reporter>(
+package_id: &PackageId,
+container_folder: PathBuf,
+patch_path: &Path,
+reporter: Arc<Reporter>,
+) -> Result<(), errors::ApplyPatchError>
+where
+Reporter: PatchesReporter + Send + Sync + 'static,
+{
+let dot_git = container_folder.join(".git");
 
-let progress_reporter = reporter.report_patch(package_id.to_string());
+tracing::debug!("applying patch");
 
-let patch = fs::read(&patch_path)
-.await
-.map_err(errors::ApplyPatchesError::PatchRead)?;
-let patch = Diff::from_buffer(&patch)?;
+let progress_reporter = reporter.report_patch(package_id.to_string());
 
-{
-let repo = setup_patches_repo(&container_folder)?;
+let patch = fs::read(&patch_path)
+.await
+.map_err(errors::ApplyPatchError::PatchRead)?;
+let patch = spawn_blocking(move || Diff::from_buffer(&patch))
+.await
+.unwrap()?;
 
 let mut apply_delta_tasks = patch
 .deltas()
 .filter(|delta| matches!(delta.status(), git2::Delta::Modified))
 .filter_map(|delta| delta.new_file().path())
 .map(|path| {
-RelativePathBuf::from_path(path)
-.unwrap()
-.to_path(&container_folder)
-})
-.map(|path| {
-async {
-if !fs::metadata(&path).await?.is_file() {
-return Ok(());
-}
+let path = container_folder.join(path);
 
-// prevent CAS corruption by the file being modified
-let content = fs::read(&path).await?;
-fs::remove_file(&path).await?;
-fs::write(path, content).await?;
-Ok(())
-}
-.map_err(errors::ApplyPatchesError::File)
-})
-.collect::<JoinSet<_>>();
+async {
+// prevent CAS corruption by the file being modified
+let content = match fs::read(&path).await {
+Ok(content) => content,
+Err(e) if e.kind() == std::io::ErrorKind::IsADirectory => return Ok(()),
+Err(e) => return Err(e),
+};
+fs::remove_file(&path).await?;
+fs::write(path, content).await?;
+Ok(())
+}
+.map_err(errors::ApplyPatchError::File)
+})
+.collect::<JoinSet<_>>();
 
 while let Some(res) = apply_delta_tasks.join_next().await {
 res.unwrap()?;
-}
-
-repo.apply(&patch, ApplyLocation::WorkDir, None)?;
-}
-
-tracing::debug!("patch applied");
-
-fs::remove_dir_all(container_folder.join(".git"))
-.await
-.map_err(errors::ApplyPatchesError::DotGitRemove)?;
-
-progress_reporter.report_done();
-
-Ok::<_, errors::ApplyPatchesError>(())
-}
-.instrument(span)
-})
-.collect::<JoinSet<_>>();
-
-while let Some(res) = tasks.join_next().await {
-res.unwrap()?
-}
-
-Ok(())
 }
+
+spawn_blocking(move || {
+let repo = if dot_git.exists() {
+let repo = Repository::open(&container_folder)?;
+reset_repo(&repo)?;
+repo
+} else {
+setup_patches_repo(&container_folder)?
+};
+
+repo.apply(&patch, ApplyLocation::WorkDir, None)
+})
+.await
+.unwrap()?;
+
+tracing::debug!("patch applied");
+
+progress_reporter.report_done();
+
+Ok::<_, errors::ApplyPatchError>(())
+}
+
+/// Remove a patch from a dependency
+#[instrument(level = "debug")]
+pub async fn remove_patch(container_folder: PathBuf) -> Result<(), errors::ApplyPatchError> {
+let dot_git = container_folder.join(".git");
+
+tracing::debug!("removing patch");
+
+if dbg!(fs::metadata(&dot_git).await).is_err() {
+return Ok(());
+}
+
+spawn_blocking(move || {
+let repo = Repository::open(&container_folder)?;
+reset_repo(&repo)?;
+
+Ok::<_, git2::Error>(())
+})
+.await
+.unwrap()?;
+
+match fs::remove_dir_all(&dot_git).await {
+Ok(()) => (),
+Err(e) if e.kind() == std::io::ErrorKind::NotFound => (),
+Err(e) => return Err(errors::ApplyPatchError::File(e)),
+}
+
+tracing::debug!("patch removed");
+
+Ok::<_, errors::ApplyPatchError>(())
 }
 
 /// Errors that can occur when using patches
@@ -186,11 +200,7 @@ pub mod errors {
 /// Errors that can occur when applying patches
 #[derive(Debug, Error)]
 #[non_exhaustive]
-pub enum ApplyPatchesError {
-/// Error deserializing the project manifest
-#[error("error deserializing project manifest")]
-ManifestDeserializationFailed(#[from] crate::errors::ManifestReadError),
-
+pub enum ApplyPatchError {
 /// Error interacting with git
 #[error("error interacting with git")]
 Git(#[from] git2::Error),