feat: update instead of recreating packages folders

Instead of recreating the packages folders on every install, we now
update the existing ones in place. Additionally, this switches
a few APIs from accepting `&TargetKind` to `TargetKind`.
daimond113 2025-01-18 14:14:09 +01:00
parent 53bdf0ced6
commit a2ce747879
14 changed files with 355 additions and 88 deletions
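
The heart of the change, visible in the download/link diff below, is that the resolved graph is split into packages whose container folder already exists on disk (kept as-is) and packages that still need downloading; only a forced install wipes the folders first. A minimal synchronous sketch of that split, assuming made-up PackageId and Node stand-ins rather than the crate's real graph types:

    use std::collections::BTreeMap;
    use std::path::{Path, PathBuf};

    // Hypothetical, simplified stand-ins for the crate's graph node types.
    type PackageId = String;

    #[derive(Clone)]
    struct Node {
        container_folder: PathBuf,
    }

    /// Split the resolved graph into "already on disk" and "still to download".
    /// With `force`, everything is re-downloaded (the real code also wipes the
    /// packages folders first in that case).
    fn partition_graph(
        graph: &BTreeMap<PackageId, Node>,
        force: bool,
    ) -> (BTreeMap<PackageId, Node>, BTreeMap<PackageId, Node>) {
        let mut installed = BTreeMap::new();
        let mut to_download = BTreeMap::new();

        for (id, node) in graph {
            if !force && node.container_folder.is_dir() {
                installed.insert(id.clone(), node.clone());
            } else {
                to_download.insert(id.clone(), node.clone());
            }
        }

        (installed, to_download)
    }

    fn main() {
        let mut graph = BTreeMap::new();
        graph.insert(
            "scope/name@1.0.0 luau".to_string(),
            Node {
                // example path only; the real layout comes from container_folder_from_project
                container_folder: Path::new("luau_packages/.pesde/scope_name/1.0.0").to_path_buf(),
            },
        );

        let (installed, to_download) = partition_graph(&graph, false);
        println!("{} installed, {} to download", installed.len(), to_download.len());
    }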

View file

@@ -68,7 +68,7 @@ pub fn resolve_version_and_target(
         .filter(|(v_id, _)| *v_id.version() == version);
     match target {
-        AnyOrSpecificTarget::Any => versions.min_by_key(|(v_id, _)| *v_id.target()),
+        AnyOrSpecificTarget::Any => versions.min_by_key(|(v_id, _)| v_id.target()),
         AnyOrSpecificTarget::Specific(kind) => {
             versions.find(|(_, entry)| entry.target.kind() == kind)
         }

View file

@@ -209,7 +209,7 @@ impl AddCommand {
     field["name"] = toml_edit::value(spec.name.clone().to_string());
     field["version"] = toml_edit::value(format!("^{}", version_id.version()));
-    if *version_id.target() != project_target {
+    if version_id.target() != project_target {
         field["target"] = toml_edit::value(version_id.target().to_string());
     }

View file

@@ -264,7 +264,7 @@ impl InitCommand {
     field["version"] = toml_edit::value(spec.version.to_string());
     field["target"] = toml_edit::value(
         spec.target
-            .unwrap_or_else(|| *id.version_id().target())
+            .unwrap_or_else(|| id.version_id().target())
             .to_string(),
     );
 }

View file

@@ -19,6 +19,10 @@ pub struct InstallCommand {
     /// The maximum number of concurrent network requests
     #[arg(long, default_value = "16")]
     network_concurrency: NonZeroUsize,
+    /// Whether to re-install all dependencies even if they are already installed
+    #[arg(long)]
+    force: bool,
 }

 #[derive(Debug, thiserror::Error)]
@@ -32,6 +36,7 @@ impl InstallCommand {
     write: true,
     network_concurrency: self.network_concurrency,
     use_lockfile: true,
+    force: self.force,
 };

 install(&options, &project, reqwest.clone(), true).await?;

View file

@@ -15,6 +15,10 @@ pub struct UpdateCommand {
     /// The maximum number of concurrent network requests
     #[arg(long, default_value = "16")]
     network_concurrency: NonZeroUsize,
+    /// Whether to re-install all dependencies even if they are already installed
+    #[arg(long)]
+    force: bool,
 }

 impl UpdateCommand {
@@ -25,6 +29,7 @@ impl UpdateCommand {
     write: !self.no_install,
     network_concurrency: self.network_concurrency,
     use_lockfile: false,
+    force: self.force,
 };

 install(&options, &project, reqwest.clone(), true).await?;

View file

@@ -30,7 +30,7 @@ fn bin_link_file(alias: &Alias) -> String {
     for a in TargetKind::VARIANTS {
         for b in TargetKind::VARIANTS {
-            all_combinations.insert((a, b));
+            all_combinations.insert((*a, *b));
         }
     }
@@ -136,6 +136,7 @@ pub struct InstallOptions {
     pub write: bool,
     pub use_lockfile: bool,
     pub network_concurrency: NonZeroUsize,
+    pub force: bool,
 }

 pub async fn install(
@@ -153,6 +154,8 @@ pub async fn install(
         .await
         .context("failed to read manifest")?;

+    let mut has_irrecoverable_changes = false;
+
     let lockfile = if options.locked {
         match up_to_date_lockfile(project).await? {
             None => {
@@ -168,9 +171,11 @@ pub async fn install(
             Ok(lockfile) => {
                 if lockfile.overrides != resolve_overrides(&manifest)? {
                     tracing::debug!("overrides are different");
+                    has_irrecoverable_changes = true;
                     None
                 } else if lockfile.target != manifest.target.kind() {
                     tracing::debug!("target kind is different");
+                    has_irrecoverable_changes = true;
                     None
                 } else {
                     Some(lockfile)
@@ -195,6 +200,7 @@ pub async fn install(
     root_progress.set_prefix(format!("{} {}: ", manifest.name, manifest.target));
     #[cfg(feature = "version-management")]
     {
+        root_progress.reset();
         root_progress.set_message("update engine linkers");

         let mut tasks = manifest
@@ -208,39 +214,6 @@ pub async fn install(
         }
     }

-    root_progress.set_message("clean");
-
-    if options.write {
-        let mut deleted_folders = HashMap::new();
-
-        for target_kind in TargetKind::VARIANTS {
-            let folder = manifest.target.kind().packages_folder(target_kind);
-            let package_dir = project.package_dir().to_path_buf();
-
-            deleted_folders
-                .entry(folder.to_string())
-                .or_insert_with(|| async move {
-                    tracing::debug!("deleting the {folder} folder");
-                    if let Some(e) = fs::remove_dir_all(package_dir.join(&folder))
-                        .await
-                        .err()
-                        .filter(|e| e.kind() != std::io::ErrorKind::NotFound)
-                    {
-                        return Err(e)
-                            .context(format!("failed to remove the {folder} folder"));
-                    };
-
-                    Ok(())
-                });
-        }
-
-        let mut tasks = deleted_folders.into_values().collect::<JoinSet<_>>();
-        while let Some(task) = tasks.join_next().await {
-            task.unwrap()?;
-        }
-    }
-
     root_progress.reset();
     root_progress.set_message("resolve");
@@ -315,7 +288,8 @@ pub async fn install(
             .hooks(hooks)
             .refreshed_sources(refreshed_sources)
             .prod(options.prod)
-            .network_concurrency(options.network_concurrency),
+            .network_concurrency(options.network_concurrency)
+            .force(options.force || has_irrecoverable_changes),
     )
     .await
     .context("failed to download and link dependencies")?;
@@ -428,6 +402,7 @@ pub async fn install(
         }
     }

+    root_progress.reset();
     root_progress.set_message("finish");

     let new_lockfile = Lockfile {
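
The force passed to the download/link step is not just the CLI flag: changes that invalidate the old lockfile beyond repair (different overrides, different target kind) also trigger a full re-install. A minimal sketch of that decision with simplified stand-in types, not the crate's real Lockfile/Manifest:

    #[derive(Clone, Copy, PartialEq, Eq)]
    enum TargetKind {
        Luau,
        Roblox,
    }

    struct Lockfile {
        target: TargetKind,
        overrides: Vec<String>,
    }

    struct Manifest {
        target: TargetKind,
        overrides: Vec<String>,
    }

    /// Decide whether the existing lockfile can be reused, and whether its
    /// invalidation forces a full re-install.
    fn evaluate_lockfile(lockfile: Lockfile, manifest: &Manifest) -> (Option<Lockfile>, bool) {
        if lockfile.overrides != manifest.overrides {
            // overrides changed: previously installed packages may be wrong
            return (None, true);
        }
        if lockfile.target != manifest.target {
            // target changed: the whole folder layout is different
            return (None, true);
        }
        (Some(lockfile), false)
    }

    fn main() {
        let manifest = Manifest {
            target: TargetKind::Luau,
            overrides: vec![],
        };
        let lockfile = Lockfile {
            target: TargetKind::Roblox,
            overrides: vec![],
        };

        let cli_force = false;
        let (reusable, has_irrecoverable_changes) = evaluate_lockfile(lockfile, &manifest);
        let force = cli_force || has_irrecoverable_changes;

        println!("lockfile reused: {}, force: {force}", reusable.is_some());
    }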

View file

@@ -4,21 +4,22 @@ use crate::{
         DependencyGraph, DependencyGraphNode, DependencyGraphNodeWithTarget,
         DependencyGraphWithTarget,
     },
-    manifest::{target::TargetKind, DependencyType},
+    manifest::{target::TargetKind, Alias, DependencyType},
     reporters::DownloadsReporter,
     source::{
         ids::PackageId,
         traits::{GetTargetOptions, PackageRef, PackageSource},
     },
-    Project, RefreshedSources,
+    Project, RefreshedSources, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
 };
 use fs_err::tokio as fs;
-use futures::TryStreamExt;
+use futures::{FutureExt, TryStreamExt};
 use std::{
-    collections::{BTreeMap, HashMap},
+    collections::{HashMap, HashSet},
     convert::Infallible,
     future::{self, Future},
     num::NonZeroUsize,
+    path::{Path, PathBuf},
     sync::Arc,
 };
 use tokio::{pin, task::JoinSet};
@@ -77,6 +78,8 @@ pub struct DownloadAndLinkOptions<Reporter = (), Hooks = ()> {
     pub prod: bool,
     /// The max number of concurrent network requests.
     pub network_concurrency: NonZeroUsize,
+    /// Whether to re-install all dependencies even if they are already installed
+    pub force: bool,
 }

 impl<Reporter, Hooks> DownloadAndLinkOptions<Reporter, Hooks>
@@ -93,6 +96,7 @@ where
             refreshed_sources: Default::default(),
             prod: false,
             network_concurrency: NonZeroUsize::new(16).unwrap(),
+            force: false,
         }
     }
@@ -125,6 +129,12 @@ where
         self.network_concurrency = network_concurrency;
         self
     }
+
+    /// Sets whether to re-install all dependencies even if they are already installed
+    pub fn force(mut self, force: bool) -> Self {
+        self.force = force;
+        self
+    }
 }

 impl Clone for DownloadAndLinkOptions {
@@ -136,10 +146,21 @@ impl Clone for DownloadAndLinkOptions {
             refreshed_sources: self.refreshed_sources.clone(),
             prod: self.prod,
             network_concurrency: self.network_concurrency,
+            force: self.force,
         }
     }
 }

+fn all_packages_dirs() -> HashSet<String> {
+    let mut dirs = HashSet::new();
+    for target_kind_a in TargetKind::VARIANTS {
+        for target_kind_b in TargetKind::VARIANTS {
+            dirs.insert(target_kind_a.packages_folder(*target_kind_b));
+        }
+    }
+    dirs
+}
+
 impl Project {
     /// Downloads a graph of dependencies and links them in the correct order
     #[instrument(skip_all, fields(prod = options.prod), level = "debug")]
@@ -159,16 +180,50 @@ impl Project {
             refreshed_sources,
             prod,
             network_concurrency,
+            force,
         } = options;

         let graph = graph.clone();
         let reqwest = reqwest.clone();
         let manifest = self.deser_manifest().await?;

+        if force {
+            let mut deleted_folders = HashMap::new();
+
+            async fn remove_dir(package_dir: PathBuf, folder: String) -> std::io::Result<()> {
+                tracing::debug!("force deleting the {folder} folder");
+                match fs::remove_dir_all(package_dir.join(&folder)).await {
+                    Ok(()) => Ok(()),
+                    Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
+                    Err(e) => Err(e),
+                }
+            }
+
+            for folder in all_packages_dirs() {
+                let package_dir = self.package_dir().to_path_buf();
+                deleted_folders
+                    .entry(folder.to_string())
+                    .or_insert_with(|| remove_dir(package_dir, folder));
+            }
+
+            deleted_folders.insert(
+                SCRIPTS_LINK_FOLDER.to_string(),
+                remove_dir(
+                    self.package_dir().to_path_buf(),
+                    SCRIPTS_LINK_FOLDER.to_string(),
+                ),
+            );
+
+            let mut tasks = deleted_folders.into_values().collect::<JoinSet<_>>();
+            while let Some(task) = tasks.join_next().await {
+                task.unwrap()?;
+            }
+        }
+
         // step 1. download dependencies
         let downloaded_graph = {
-            let mut downloaded_graph = BTreeMap::new();
             let mut download_graph_options = DownloadGraphOptions::<Reporter>::new(reqwest.clone())
                 .refreshed_sources(refreshed_sources.clone())
                 .network_concurrency(network_concurrency);
@@ -177,8 +232,41 @@ impl Project {
                 download_graph_options = download_graph_options.reporter(reporter.clone());
             }

+            let mut downloaded_graph = DependencyGraph::new();
+
+            let graph_to_download = if force {
+                graph.clone()
+            } else {
+                let mut tasks = graph
+                    .iter()
+                    .map(|(id, node)| {
+                        let id = id.clone();
+                        let node = node.clone();
+                        let container_folder =
+                            node.container_folder_from_project(&id, self, manifest.target.kind());
+                        async move {
+                            return (id, node, fs::metadata(&container_folder).await.is_ok());
+                        }
+                    })
+                    .collect::<JoinSet<_>>();
+
+                let mut graph_to_download = DependencyGraph::new();
+                while let Some(task) = tasks.join_next().await {
+                    let (id, node, installed) = task.unwrap();
+                    if installed {
+                        downloaded_graph.insert(id, node);
+                        continue;
+                    }
+
+                    graph_to_download.insert(id, node);
+                }
+
+                Arc::new(graph_to_download)
+            };
+
             let downloaded = self
-                .download_graph(&graph, download_graph_options.clone())
+                .download_graph(&graph_to_download, download_graph_options.clone())
                 .instrument(tracing::debug_span!("download"))
                 .await?;
             pin!(downloaded);
@@ -305,6 +393,7 @@ impl Project {
         }

         let mut graph = Arc::into_inner(graph).unwrap();
+        let manifest = Arc::new(manifest);

         if prod {
             let (dev_graph, prod_graph) = graph
@@ -316,28 +405,189 @@ impl Project {
             graph = prod_graph;
             let dev_graph = Arc::new(dev_graph);

-            let manifest_target_kind = manifest.target.kind();
-
             // the `true` argument means it'll remove the dependencies linkers
             self.link(
                 &dev_graph,
-                &Arc::new(manifest),
+                &manifest,
                 &Arc::new(Default::default()),
                 false,
                 true,
             )
             .await?;
+        }

-            let mut tasks = dev_graph
-                .iter()
-                .map(|(id, node)| {
-                    let container_folder = node.node
-                        .container_folder_from_project(id, self, manifest_target_kind);
-
-                    async move {
-                        fs::remove_dir_all(&container_folder)
-                            .await
-                            .map_err(errors::DownloadAndLinkError::Io)
-                    }
-                })
-                .collect::<JoinSet<_>>();
+        if !force {
+            let used_paths = Arc::new(
+                graph
+                    .iter()
+                    .filter(|(_, node)| !node.node.pkg_ref.is_wally_package())
+                    .map(|(id, node)| {
+                        node.node
+                            .container_folder(id)
+                            .version_folder()
+                            .to_path_buf()
+                    })
+                    .collect::<HashSet<_>>(),
+            );
+            #[cfg(feature = "wally-compat")]
+            let used_wally_paths = Arc::new(
+                graph
+                    .iter()
+                    .filter(|(_, node)| node.node.pkg_ref.is_wally_package())
+                    .map(|(id, node)| {
+                        node.node
+                            .container_folder(id)
+                            .version_folder()
+                            .to_path_buf()
+                    })
+                    .collect::<HashSet<_>>(),
+            );
+
+            async fn remove_empty_dir(path: &Path) -> std::io::Result<()> {
+                match fs::remove_dir(path).await {
+                    Ok(()) => Ok(()),
+                    Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
+                    Err(e) if e.kind() == std::io::ErrorKind::DirectoryNotEmpty => Ok(()),
+                    Err(e) => Err(e),
+                }
+            }
+
+            let mut tasks = all_packages_dirs()
+                .into_iter()
+                .map(|folder| {
+                    let packages_dir = self.package_dir().join(&folder);
+                    let packages_index_dir = packages_dir.join(PACKAGES_CONTAINER_NAME);
+                    let used_paths = used_paths.clone();
+                    #[cfg(feature = "wally-compat")]
+                    let used_wally_paths = used_wally_paths.clone();
+
+                    let expected_aliases = graph
+                        .iter()
+                        .filter(|(id, _)| {
+                            manifest
+                                .target
+                                .kind()
+                                .packages_folder(id.version_id().target())
+                                == folder
+                        })
+                        .filter_map(|(_, node)| {
+                            node.node.direct.as_ref().map(|(alias, _, _)| alias.clone())
+                        })
+                        .collect::<HashSet<_>>();
+
+                    async move {
+                        let mut index_entries = match fs::read_dir(&packages_index_dir).await {
+                            Ok(entries) => entries,
+                            Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(()),
+                            Err(e) => return Err(e),
+                        };
+                        // we don't handle NotFound here because the upper level will handle it
+                        let mut packages_entries = fs::read_dir(&packages_dir).await?;
+                        let mut tasks = JoinSet::new();
+
+                        async fn index_entry(
+                            entry: fs::DirEntry,
+                            packages_index_dir: &Path,
+                            tasks: &mut JoinSet<std::io::Result<()>>,
+                            used_paths: &HashSet<PathBuf>,
+                            #[cfg(feature = "wally-compat")] used_wally_paths: &HashSet<PathBuf>,
+                        ) -> std::io::Result<()> {
+                            let path = entry.path();
+                            let path_relative = path.strip_prefix(packages_index_dir).unwrap();
+
+                            let is_wally = entry
+                                .file_name()
+                                .to_str()
+                                .expect("non UTF-8 folder name in packages index")
+                                .contains("@");
+
+                            if is_wally {
+                                #[cfg(feature = "wally-compat")]
+                                if !used_wally_paths.contains(path_relative) {
+                                    tasks.spawn(async { fs::remove_dir_all(path).await });
+                                }
+                                #[cfg(not(feature = "wally-compat"))]
+                                {
+                                    tracing::error!(
+                                        "found Wally package in index despite feature being disabled at `{}`",
+                                        path.display()
+                                    );
+                                }
+
+                                return Ok(());
+                            }
+
+                            let mut tasks = JoinSet::new();
+
+                            let mut entries = fs::read_dir(&path).await?;
+                            while let Some(entry) = entries.next_entry().await? {
+                                let version = entry.file_name();
+                                let path_relative = path_relative.join(&version);
+                                if used_paths.contains(&path_relative) {
+                                    continue;
+                                }
+
+                                let path = entry.path();
+                                tasks.spawn(async { fs::remove_dir_all(path).await });
+                            }
+
+                            while let Some(task) = tasks.join_next().await {
+                                task.unwrap()?;
+                            }
+
+                            remove_empty_dir(&path).await
+                        }
+
+                        async fn packages_entry(
+                            entry: fs::DirEntry,
+                            tasks: &mut JoinSet<std::io::Result<()>>,
+                            expected_aliases: &HashSet<Alias>,
+                        ) -> std::io::Result<()> {
+                            if !entry.file_type().await?.is_file() {
+                                return Ok(());
+                            }
+
+                            let path = entry.path();
+                            let name = path
+                                .file_stem()
+                                .unwrap()
+                                .to_str()
+                                .expect("non UTF-8 file name in packages folder");
+                            let name = name.strip_suffix(".bin").unwrap_or(name);
+                            let name = match name.parse::<Alias>() {
+                                Ok(name) => name,
+                                Err(e) => {
+                                    tracing::error!("invalid alias in packages folder: {e}");
+                                    return Ok(());
+                                }
+                            };
+
+                            if !expected_aliases.contains(&name) {
+                                tasks.spawn(async { fs::remove_file(path).await });
+                            }
+
+                            Ok(())
+                        }
+
+                        loop {
+                            tokio::select! {
+                                Some(entry) = index_entries.next_entry().map(Result::transpose) => {
+                                    index_entry(
+                                        entry?,
+                                        &packages_index_dir,
+                                        &mut tasks,
+                                        &used_paths,
+                                        #[cfg(feature = "wally-compat")]
+                                        &used_wally_paths,
+                                    ).await?;
+                                }
+                                Some(entry) = packages_entries.next_entry().map(Result::transpose) => {
+                                    packages_entry(
+                                        entry?,
+                                        &mut tasks,
+                                        &expected_aliases,
+                                    ).await?;
+                                }
+                                else => break,
+                            };
+                        }
+
+                        while let Some(task) = tasks.join_next().await {
+                            task.unwrap()?;
+                        }
+
+                        remove_empty_dir(&packages_index_dir).await?;
+                        remove_empty_dir(&packages_dir).await?;
+
+                        Ok::<_, std::io::Error>(())
+                    }
+                })
+                .collect::<JoinSet<_>>();
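
The cleanup above leans on one small pattern throughout: directory removals where "missing" (and, for remove_dir, "not empty") are expected outcomes rather than errors. A synchronous, self-contained sketch of that pattern using the standard library (the crate itself uses the async fs_err::tokio wrappers, and all paths here are examples):

    use std::io;
    use std::path::Path;

    /// Remove a directory only if it exists and is already empty;
    /// a missing or non-empty directory is simply left alone.
    fn remove_empty_dir(path: &Path) -> io::Result<()> {
        match std::fs::remove_dir(path) {
            Ok(()) => Ok(()),
            Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(()),
            Err(e) if e.kind() == io::ErrorKind::DirectoryNotEmpty => Ok(()),
            Err(e) => Err(e),
        }
    }

    /// Remove a directory tree, treating "already gone" as success.
    fn remove_dir_all_if_exists(path: &Path) -> io::Result<()> {
        match std::fs::remove_dir_all(path) {
            Ok(()) => Ok(()),
            Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(()),
            Err(e) => Err(e),
        }
    }

    fn main() -> io::Result<()> {
        // e.g. prune an unused package version, then drop the now-empty parents
        remove_dir_all_if_exists(Path::new("luau_packages/.pesde/scope_name/0.9.0"))?;
        remove_empty_dir(Path::new("luau_packages/.pesde/scope_name"))?;
        remove_empty_dir(Path::new("luau_packages/.pesde"))?;
        remove_empty_dir(Path::new("luau_packages"))?;
        Ok(())
    }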

View file

@@ -12,7 +12,10 @@ use crate::{
     Project, PACKAGES_CONTAINER_NAME,
 };
 use serde::{Deserialize, Serialize};
-use std::{collections::BTreeMap, path::PathBuf};
+use std::{
+    collections::BTreeMap,
+    path::{Path, PathBuf},
+};

 /// A graph of dependencies
 pub type Graph<Node> = BTreeMap<PackageId, Node>;
@@ -35,27 +38,56 @@ pub struct DependencyGraphNode {
     pub pkg_ref: PackageRefs,
 }

+/// A container folder
+#[derive(Debug, Clone)]
+pub struct ContainerFolder(PathBuf);
+
+impl ContainerFolder {
+    /// Returns the path of the container folder
+    pub fn path(&self) -> &Path {
+        &self.0
+    }
+
+    /// Returns the version's folder
+    pub fn version_folder(&self) -> &Path {
+        self.0.parent().unwrap()
+    }
+}
+
 impl DependencyGraphNode {
-    pub(crate) fn base_folder(&self, version_id: &VersionId, project_target: TargetKind) -> String {
+    pub(crate) fn dependencies_dir(
+        &self,
+        version_id: &VersionId,
+        project_target: TargetKind,
+    ) -> String {
         if self.pkg_ref.use_new_structure() {
-            version_id.target().packages_folder(&project_target)
+            version_id.target().packages_folder(project_target)
         } else {
             "..".to_string()
         }
     }

     /// Returns the folder to store the contents of the package in
-    pub fn container_folder(&self, package_id: &PackageId) -> PathBuf {
-        let (name, version) = package_id.parts();
+    pub fn container_folder(&self, package_id: &PackageId) -> ContainerFolder {
+        let (name, v_id) = package_id.parts();

         if self.pkg_ref.is_wally_package() {
-            return PathBuf::from(format!("{}_{}@{}", name.scope(), name.name(), version))
-                .join(name.name());
+            return ContainerFolder(
+                PathBuf::from(format!(
+                    "{}_{}@{}",
+                    name.scope(),
+                    name.name(),
+                    v_id.version()
+                ))
+                .join(name.name()),
+            );
         }

-        PathBuf::from(name.escaped())
-            .join(version.to_string())
-            .join(name.name())
+        ContainerFolder(
+            PathBuf::from(name.escaped())
+                .join(v_id.version().to_string())
+                .join(name.name()),
+        )
     }

     /// Returns the folder to store the contents of the package in starting from the project's package directory
@@ -69,7 +101,7 @@ impl DependencyGraphNode {
         .package_dir()
         .join(manifest_target_kind.packages_folder(package_id.version_id().target()))
         .join(PACKAGES_CONTAINER_NAME)
-        .join(self.container_folder(package_id))
+        .join(self.container_folder(package_id).path())
     }
 }
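
The new ContainerFolder wrapper exposes two related paths: path() is where the package contents live, and version_folder() is its parent, which the cleanup code compares against the set of still-used version folders. A small sketch with a simplified copy of the type and a made-up layout:

    use std::path::{Path, PathBuf};

    // Simplified copy of the type introduced in this commit.
    struct ContainerFolder(PathBuf);

    impl ContainerFolder {
        fn path(&self) -> &Path {
            &self.0
        }

        // the folder named after the version, one level above the package contents
        fn version_folder(&self) -> &Path {
            self.0.parent().unwrap()
        }
    }

    fn main() {
        // example layout only; the real one is built in container_folder above
        let folder = ContainerFolder(PathBuf::from("scope_name").join("1.2.3").join("name"));
        assert_eq!(folder.path(), Path::new("scope_name/1.2.3/name"));
        assert_eq!(folder.version_folder(), Path::new("scope_name/1.2.3"));
    }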

View file

@@ -151,8 +151,8 @@ impl Project {
     let mut package_types = PackageTypes::new();
     while let Some(task) = tasks.join_next().await {
-        let (version_id, types) = task.unwrap()?;
-        package_types.insert(version_id, types);
+        let (package_id, types) = task.unwrap()?;
+        package_types.insert(package_id, types);
     }

     // step 3. link all packages (and their dependencies), this time with types
@@ -248,12 +248,12 @@ impl Project {
     if remove {
         tasks.spawn(async move {
-            fs::remove_dir_all(scripts_base).await?;
-            if let Ok(mut entries) = fs::read_dir(&scripts_container).await {
-                if entries.next_entry().await.transpose().is_none() {
-                    drop(entries);
-                    fs::remove_dir(&scripts_container).await?;
-                }
-            }
+            into_link_result(fs::remove_dir_all(scripts_base).await)?;
+            // remove the scripts container if it's empty
+            match fs::remove_dir(scripts_container).await {
+                Ok(_) => {}
+                Err(e) if e.kind() == std::io::ErrorKind::DirectoryNotEmpty => {}
+                r => return into_link_result(r),
+            }

             Ok(())
@@ -319,8 +319,8 @@ impl Project {
         .await?;

         let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
-        let container_folder =
-            packages_container_folder.join(node.node.container_folder(&package_id));
+        let container_folder = packages_container_folder
+            .join(node.node.container_folder(&package_id).path());

         if let Some((alias, _, _)) = &node.node.direct {
             project
@@ -367,10 +367,10 @@ impl Project {
         let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);

         let container_folder = packages_container_folder
-            .join(dependency_node.node.container_folder(dependency_id));
+            .join(dependency_node.node.container_folder(dependency_id).path());

         let linker_folder = create_and_canonicalize(node_container_folder.join(
-            node.node.base_folder(
+            node.node.dependencies_dir(
                 package_id.version_id(),
                 dependency_node.target.kind(),
             ),

View file

@@ -60,7 +60,7 @@ impl TargetKind {
     /// The folder to store packages in for this target
     /// self is the project's target, dependency is the target of the dependency
-    pub fn packages_folder(&self, dependency: &Self) -> String {
+    pub fn packages_folder(self, dependency: Self) -> String {
         // the code below might seem better, but it's just going to create issues with users trying
         // to use a build script, since imports would break between targets
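
The signature change above works because TargetKind is a small Copy enum, so passing it by value costs no more than passing a reference and removes the `*`/`&` noise at call sites. A standalone sketch of the pattern with hypothetical variants and a made-up folder naming scheme (the real mapping is in the elided body above):

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    enum TargetKind {
        Luau,
        Lune,
    }

    impl TargetKind {
        // taking `self` and `dependency` by value: fine for a Copy enum,
        // and callers no longer need to deref or borrow
        fn packages_folder(self, dependency: Self) -> String {
            // made-up scheme for illustration only
            if self == dependency {
                format!("{self:?}_packages").to_lowercase()
            } else {
                format!("{self:?}_{dependency:?}_packages").to_lowercase()
            }
        }
    }

    fn main() {
        let project = TargetKind::Luau;
        println!("{}", project.packages_folder(TargetKind::Luau));
        println!("{}", project.packages_folder(TargetKind::Lune));
    }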

View file

@@ -369,7 +369,7 @@ impl Project {
         .chain(std::iter::once(dependency_alias))
         .collect(),
     overridden.is_some(),
-    *package_id.version_id().target(),
+    package_id.version_id().target(),
 ));
 }

View file

@@ -184,7 +184,7 @@ impl PackageFs {
     }
     for other_target in TargetKind::VARIANTS {
-        if target.packages_folder(other_target) == file_name {
+        if target.packages_folder(*other_target) == file_name {
             continue 'entry;
         }
     }

View file

@@ -21,8 +21,8 @@ impl VersionId {
     }

     /// Access the target
-    pub fn target(&self) -> &TargetKind {
-        &self.1
+    pub fn target(&self) -> TargetKind {
+        self.1
     }

     /// Returns this version ID as a string that can be used in the filesystem
@@ -36,8 +36,8 @@ impl VersionId {
     }

     /// Access the parts of the version ID
-    pub fn parts(&self) -> (&Version, &TargetKind) {
-        (&self.0, &self.1)
+    pub fn parts(&self) -> (&Version, TargetKind) {
+        (&self.0, self.1)
     }
 }

View file

@@ -1,6 +1,6 @@
 use gix::Url;
 use relative_path::RelativePathBuf;
-use reqwest::header::AUTHORIZATION;
+use reqwest::header::{ACCEPT, AUTHORIZATION};
 use serde::{Deserialize, Serialize};
 use std::{
     collections::{BTreeMap, BTreeSet, HashSet},
@@ -225,7 +225,7 @@ impl PackageSource for PesdePackageSource {
         &urlencoding::encode(&id.version_id().target().to_string()),
     );

-    let mut request = reqwest.get(&url);
+    let mut request = reqwest.get(&url).header(ACCEPT, "application/octet-stream");

     if let Some(token) = project.auth_config().tokens().get(&self.repo_url) {
         tracing::debug!("using token for {}", self.repo_url);
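
For reference, a minimal standalone sketch of the request change above using plain reqwest; the URL, environment variable, and Bearer scheme are placeholders, not the registry's actual API:

    use reqwest::header::{ACCEPT, AUTHORIZATION};

    #[tokio::main]
    async fn main() -> Result<(), reqwest::Error> {
        let client = reqwest::Client::new();

        // ask the registry for the raw archive rather than a JSON description
        let mut request = client
            .get("https://registry.example.com/v1/packages/scope/name/1.0.0/luau/archive")
            .header(ACCEPT, "application/octet-stream");

        // attach a token if one is configured (the real source reads it from its auth config)
        if let Ok(token) = std::env::var("REGISTRY_TOKEN") {
            request = request.header(AUTHORIZATION, format!("Bearer {token}"));
        }

        let response = request.send().await?;
        println!("{}", response.status());
        Ok(())
    }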