fix: remove scripts linkers in incremental installs

Additionally, this commit changes the linking
process to be much less blocking, which should
bring a not-insignificant speedup to the
installation process.
daimond113 2025-01-24 23:39:15 +01:00
parent 8835156b76
commit 801acb0264
5 changed files with 466 additions and 459 deletions
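
The "much less blocking" claim in the message maps to the rewritten `link` loop in src/linking/mod.rs below: node preparation and per-dependency linker generation now run as two tokio `JoinSet`s driven by one `tokio::select!` loop, so a node's dependencies start linking as soon as that node is ready rather than after every node finishes. A minimal, self-contained sketch of that pattern (the names `prepare` and `link_one` are illustrative, not pesde's APIs):

```rust
use tokio::task::JoinSet;

// stand-in for create_and_canonicalize + base-folder setup
async fn prepare(id: u32) -> u32 {
    id
}

// stand-in for writing one dependency's linker module
async fn link_one(_id: u32) {}

#[tokio::main]
async fn main() {
    let mut prepare_tasks: JoinSet<u32> = (0u32..4).map(prepare).collect();
    let mut link_tasks: JoinSet<()> = JoinSet::new();

    loop {
        tokio::select! {
            // a node is ready: queue its linking work immediately,
            // instead of waiting for every other node first
            Some(res) = prepare_tasks.join_next() => {
                link_tasks.spawn(link_one(res.unwrap()));
            }
            Some(res) = link_tasks.join_next() => {
                res.unwrap();
            }
            // both task sets drained: all linking done
            else => break,
        }
    }
}
```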

src/download_and_link.rs

@@ -1,25 +1,26 @@
use crate::{
all_packages_dirs,
download::DownloadGraphOptions,
graph::{
DependencyGraph, DependencyGraphNode, DependencyGraphNodeWithTarget,
DependencyGraphWithTarget,
},
manifest::{target::TargetKind, Alias, DependencyType},
manifest::{target::TargetKind, DependencyType},
reporters::DownloadsReporter,
source::{
ids::PackageId,
traits::{GetTargetOptions, PackageRef, PackageSource},
},
Project, RefreshedSources, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
Project, RefreshedSources, SCRIPTS_LINK_FOLDER,
};
use fs_err::tokio as fs;
use futures::{FutureExt, TryStreamExt};
use futures::TryStreamExt;
use std::{
collections::{HashMap, HashSet},
collections::HashMap,
convert::Infallible,
future::{self, Future},
num::NonZeroUsize,
path::{Path, PathBuf},
path::PathBuf,
sync::Arc,
};
use tokio::{pin, task::JoinSet};
@@ -151,16 +152,6 @@ impl Clone for DownloadAndLinkOptions {
}
}
fn all_packages_dirs() -> HashSet<String> {
let mut dirs = HashSet::new();
for target_kind_a in TargetKind::VARIANTS {
for target_kind_b in TargetKind::VARIANTS {
dirs.insert(target_kind_a.packages_folder(*target_kind_b));
}
}
dirs
}
impl Project {
/// Downloads a graph of dependencies and links them in the correct order
#[instrument(skip_all, fields(prod = options.prod), level = "debug")]
@@ -188,35 +179,24 @@ impl Project {
let manifest = self.deser_manifest().await?;
if force {
let mut deleted_folders = HashMap::new();
async fn remove_dir(dir: PathBuf) -> std::io::Result<()> {
tracing::debug!("force deleting the `{}` folder", dir.display());
async fn remove_dir(package_dir: PathBuf, folder: String) -> std::io::Result<()> {
tracing::debug!("force deleting the {folder} folder");
match fs::remove_dir_all(package_dir.join(&folder)).await {
match fs::remove_dir_all(dir).await {
Ok(()) => Ok(()),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
Err(e) => Err(e),
}
}
for folder in all_packages_dirs() {
let package_dir = self.package_dir().to_path_buf();
let mut tasks = all_packages_dirs()
.into_iter()
.map(|folder| remove_dir(self.package_dir().join(&folder)))
.chain(std::iter::once(remove_dir(
self.package_dir().join(SCRIPTS_LINK_FOLDER),
)))
.collect::<JoinSet<_>>();
deleted_folders
.entry(folder.to_string())
.or_insert_with(|| remove_dir(package_dir, folder));
}
deleted_folders.insert(
SCRIPTS_LINK_FOLDER.to_string(),
remove_dir(
self.package_dir().to_path_buf(),
SCRIPTS_LINK_FOLDER.to_string(),
),
);
let mut tasks = deleted_folders.into_values().collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
@@ -393,227 +373,13 @@ impl Project {
}
let mut graph = Arc::into_inner(graph).unwrap();
let manifest = Arc::new(manifest);
if prod {
let (dev_graph, prod_graph) = graph
.into_iter()
.partition::<DependencyGraphWithTarget, _>(|(_, node)| {
node.node.resolved_ty == DependencyType::Dev
});
graph = prod_graph;
let dev_graph = Arc::new(dev_graph);
// the `true` argument means it'll remove the dependencies linkers
self.link(
&dev_graph,
&manifest,
&Arc::new(Default::default()),
false,
true,
)
.await?;
graph.retain(|_, node| node.node.resolved_ty != DependencyType::Dev);
}
if !force {
async fn remove_empty_dir(path: &Path) -> std::io::Result<()> {
match fs::remove_dir(path).await {
Ok(()) => Ok(()),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
Err(e) if e.kind() == std::io::ErrorKind::DirectoryNotEmpty => Ok(()),
Err(e) => Err(e),
}
}
fn index_entry(
entry: fs::DirEntry,
packages_index_dir: &Path,
tasks: &mut JoinSet<std::io::Result<()>>,
used_paths: &Arc<HashSet<PathBuf>>,
#[cfg(feature = "wally-compat")] used_wally_paths: &Arc<HashSet<PathBuf>>,
) {
let path = entry.path();
let path_relative = path.strip_prefix(packages_index_dir).unwrap().to_path_buf();
let is_wally = entry
.file_name()
.to_str()
.expect("non UTF-8 folder name in packages index")
.contains("@");
if is_wally {
#[cfg(feature = "wally-compat")]
if !used_wally_paths.contains(&path_relative) {
tasks.spawn(async { fs::remove_dir_all(path).await });
}
#[cfg(not(feature = "wally-compat"))]
{
tracing::error!(
"found Wally package in index despite feature being disabled at `{}`",
path.display()
);
}
return;
}
let used_paths = used_paths.clone();
tasks.spawn(async move {
let mut tasks = JoinSet::new();
let mut entries = fs::read_dir(&path).await?;
while let Some(entry) = entries.next_entry().await? {
let version = entry.file_name();
let path_relative = path_relative.join(&version);
if used_paths.contains(&path_relative) {
continue;
}
let path = entry.path();
tasks.spawn(async { fs::remove_dir_all(path).await });
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
remove_empty_dir(&path).await
});
}
fn packages_entry(
entry: fs::DirEntry,
tasks: &mut JoinSet<std::io::Result<()>>,
expected_aliases: &Arc<HashSet<Alias>>,
) {
let expected_aliases = expected_aliases.clone();
tasks.spawn(async move {
if !entry.file_type().await?.is_file() {
return Ok(());
}
let path = entry.path();
let name = path
.file_stem()
.unwrap()
.to_str()
.expect("non UTF-8 file name in packages folder");
let name = name.strip_suffix(".bin").unwrap_or(name);
let name = match name.parse::<Alias>() {
Ok(name) => name,
Err(e) => {
tracing::error!("invalid alias in packages folder: {e}");
return Ok(());
}
};
if !expected_aliases.contains(&name) {
fs::remove_file(path).await?;
}
Ok(())
});
}
let used_paths = graph
.iter()
.filter(|(_, node)| !node.node.pkg_ref.is_wally_package())
.map(|(id, node)| {
node.node
.container_folder(id)
.version_folder()
.to_path_buf()
})
.collect::<HashSet<_>>();
let used_paths = Arc::new(used_paths);
#[cfg(feature = "wally-compat")]
let used_wally_paths = graph
.iter()
.filter(|(_, node)| node.node.pkg_ref.is_wally_package())
.map(|(id, node)| {
node.node
.container_folder(id)
.version_folder()
.to_path_buf()
})
.collect::<HashSet<_>>();
#[cfg(feature = "wally-compat")]
let used_wally_paths = Arc::new(used_wally_paths);
let mut tasks = all_packages_dirs()
.into_iter()
.map(|folder| {
let packages_dir = self.package_dir().join(&folder);
let packages_index_dir = packages_dir.join(PACKAGES_CONTAINER_NAME);
let used_paths = used_paths.clone();
#[cfg(feature = "wally-compat")]
let used_wally_paths = used_wally_paths.clone();
let expected_aliases = graph
.iter()
.filter(|(id, _)| {
manifest
.target
.kind()
.packages_folder(id.version_id().target())
== folder
})
.filter_map(|(_, node)| {
node.node.direct.as_ref().map(|(alias, _, _)| alias.clone())
})
.collect::<HashSet<_>>();
let expected_aliases = Arc::new(expected_aliases);
async move {
let mut index_entries = match fs::read_dir(&packages_index_dir).await {
Ok(entries) => entries,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(()),
Err(e) => return Err(e),
};
// we don't handle NotFound here because the upper level will handle it
let mut packages_entries = fs::read_dir(&packages_dir).await?;
let mut tasks = JoinSet::new();
loop {
tokio::select! {
Some(entry) = index_entries.next_entry().map(Result::transpose) => {
index_entry(
entry?,
&packages_index_dir,
&mut tasks,
&used_paths,
#[cfg(feature = "wally-compat")]
&used_wally_paths,
);
}
Some(entry) = packages_entries.next_entry().map(Result::transpose) => {
packages_entry(
entry?,
&mut tasks,
&expected_aliases,
);
}
else => break,
}
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
remove_empty_dir(&packages_index_dir).await?;
remove_empty_dir(&packages_dir).await?;
Ok::<_, std::io::Error>(())
}
})
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
if prod || !force {
self.remove_unused(&graph).await?;
}
Ok(graph)
@@ -651,5 +417,9 @@ pub mod errors {
/// Error getting a target
#[error("error getting target")]
GetTarget(#[from] crate::source::errors::GetTargetError),
/// Removing unused dependencies failed
#[error("error removing unused dependencies")]
RemoveUnused(#[from] crate::linking::incremental::errors::RemoveUnusedError),
}
}
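
The new `RemoveUnused` variant uses the same thiserror `#[from]` chaining as the rest of the enum, which is what lets the install path invoke the incremental cleanup with a bare `?`. A self-contained sketch of the idiom (error names mirror the ones above, but the types are simplified):

```rust
use thiserror::Error;

#[derive(Debug, Error)]
enum RemoveUnusedError {
    /// IO error
    #[error("IO error")]
    Io(#[from] std::io::Error),
}

#[derive(Debug, Error)]
enum DownloadAndLinkError {
    /// Removing unused dependencies failed
    #[error("error removing unused dependencies")]
    RemoveUnused(#[from] RemoveUnusedError),
}

fn remove_unused() -> Result<(), RemoveUnusedError> {
    Err(std::io::Error::other("disk on fire").into())
}

fn download_and_link() -> Result<(), DownloadAndLinkError> {
    // `?` converts RemoveUnusedError into DownloadAndLinkError via #[from]
    remove_unused()?;
    Ok(())
}

fn main() {
    assert!(download_and_link().is_err());
}
```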

src/graph.rs

@@ -12,10 +12,7 @@ use crate::{
Project, PACKAGES_CONTAINER_NAME,
};
use serde::{Deserialize, Serialize};
use std::{
collections::BTreeMap,
path::{Path, PathBuf},
};
use std::{collections::BTreeMap, path::PathBuf};
/// A graph of dependencies
pub type Graph<Node> = BTreeMap<PackageId, Node>;
@@ -38,22 +35,6 @@ pub struct DependencyGraphNode {
pub pkg_ref: PackageRefs,
}
/// A container folder
#[derive(Debug, Clone)]
pub struct ContainerFolder(PathBuf);
impl ContainerFolder {
/// Returns the path of the container folder
pub fn path(&self) -> &Path {
&self.0
}
/// Returns the version's folder
pub fn version_folder(&self) -> &Path {
self.0.parent().unwrap()
}
}
impl DependencyGraphNode {
pub(crate) fn dependencies_dir(
&self,
@@ -68,26 +49,22 @@ impl DependencyGraphNode {
}
/// Returns the folder to store the contents of the package in
pub fn container_folder(&self, package_id: &PackageId) -> ContainerFolder {
pub fn container_folder(&self, package_id: &PackageId) -> PathBuf {
let (name, v_id) = package_id.parts();
if self.pkg_ref.is_wally_package() {
return ContainerFolder(
PathBuf::from(format!(
"{}_{}@{}",
name.scope(),
name.name(),
v_id.version()
))
.join(name.name()),
);
return PathBuf::from(format!(
"{}_{}@{}",
name.scope(),
name.name(),
v_id.version()
))
.join(name.name());
}
ContainerFolder(
PathBuf::from(name.escaped())
.join(v_id.version().to_string())
.join(name.name()),
)
PathBuf::from(name.escaped())
.join(v_id.version().to_string())
.join(name.name())
}
/// Returns the folder to store the contents of the package in starting from the project's package directory
@@ -101,7 +78,7 @@ impl DependencyGraphNode {
.package_dir()
.join(manifest_target_kind.packages_folder(package_id.version_id().target()))
.join(PACKAGES_CONTAINER_NAME)
.join(self.container_folder(package_id).path())
.join(self.container_folder(package_id))
}
}
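
Dropping the `ContainerFolder` wrapper is safe because the returned path always ends in `<version folder>/<package name>`, so the version folder the wrapper used to expose is simply `.parent()` of the container folder; the new incremental cleanup relies on exactly that. A sketch of the invariant for the non-Wally case (the escaped name and version below are illustrative):

```rust
use std::path::PathBuf;

// shape of the non-Wally container path: <escaped name>/<version>/<name>
fn container_folder(escaped_name: &str, version: &str, name: &str) -> PathBuf {
    PathBuf::from(escaped_name).join(version).join(name)
}

fn main() {
    let folder = container_folder("acme+tool", "1.2.3", "tool");
    assert_eq!(folder, PathBuf::from("acme+tool/1.2.3/tool"));
    // the version folder is recoverable as the parent, which is what
    // replaced ContainerFolder::version_folder()
    assert_eq!(folder.parent().unwrap(), PathBuf::from("acme+tool/1.2.3"));
}
```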

src/lib.rs

@@ -5,7 +5,7 @@
use crate::{
lockfile::Lockfile,
manifest::Manifest,
manifest::{target::TargetKind, Manifest},
source::{
traits::{PackageSource, RefreshOptions},
PackageSources,
@@ -434,6 +434,16 @@ pub fn version_matches(req: &VersionReq, version: &Version) -> bool {
*req == VersionReq::STAR || req.matches(version)
}
pub(crate) fn all_packages_dirs() -> HashSet<String> {
let mut dirs = HashSet::new();
for target_kind_a in TargetKind::VARIANTS {
for target_kind_b in TargetKind::VARIANTS {
dirs.insert(target_kind_a.packages_folder(*target_kind_b));
}
}
dirs
}
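
`all_packages_dirs` enumerates the packages folder for every ordered pair of target kinds, so cleanup can visit every folder that any project/dependency target combination could have produced. An illustrative analogue with a made-up two-variant `TargetKind` and folder scheme (pesde's real names come from `TargetKind::packages_folder`):

```rust
use std::collections::HashSet;

#[derive(Clone, Copy)]
enum TargetKind {
    Luau,
    Roblox,
}

impl TargetKind {
    const VARIANTS: [TargetKind; 2] = [TargetKind::Luau, TargetKind::Roblox];

    // made-up scheme: folder named after the dependency's target
    fn packages_folder(self, dependency: TargetKind) -> String {
        match dependency {
            TargetKind::Luau => "luau_packages".into(),
            TargetKind::Roblox => "roblox_packages".into(),
        }
    }
}

fn all_packages_dirs() -> HashSet<String> {
    let mut dirs = HashSet::new();
    for a in TargetKind::VARIANTS {
        for b in TargetKind::VARIANTS {
            dirs.insert(a.packages_folder(b));
        }
    }
    dirs
}

fn main() {
    // the HashSet deduplicates folders shared between pairs
    assert_eq!(all_packages_dirs().len(), 2);
}
```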
/// Errors that can occur when using the pesde library
pub mod errors {
use std::path::PathBuf;

src/linking/incremental.rs (new file, 274 lines)

@@ -0,0 +1,274 @@
use crate::{
all_packages_dirs, graph::DependencyGraphWithTarget, manifest::Alias, Project,
PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
};
use fs_err::tokio as fs;
use futures::FutureExt;
use std::{
collections::HashSet,
path::{Path, PathBuf},
sync::Arc,
};
use tokio::task::JoinSet;
async fn remove_empty_dir(path: &Path) -> std::io::Result<()> {
match fs::remove_dir(path).await {
Ok(()) => Ok(()),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
Err(e) if e.kind() == std::io::ErrorKind::DirectoryNotEmpty => Ok(()),
Err(e) => Err(e),
}
}
fn index_entry(
entry: fs::DirEntry,
packages_index_dir: &Path,
tasks: &mut JoinSet<std::io::Result<()>>,
used_paths: &Arc<HashSet<PathBuf>>,
) {
let path = entry.path();
let path_relative = path.strip_prefix(packages_index_dir).unwrap().to_path_buf();
let is_wally = entry
.file_name()
.to_str()
.expect("non UTF-8 folder name in packages index")
.contains("@");
let used_paths = used_paths.clone();
tasks.spawn(async move {
if is_wally {
#[cfg(not(feature = "wally-compat"))]
{
tracing::error!(
"found Wally package in index despite feature being disabled at `{}`",
path.display()
);
}
#[cfg(feature = "wally-compat")]
{
if !used_paths.contains(&path_relative) {
fs::remove_dir_all(path).await?;
}
return Ok(());
}
}
let mut tasks = JoinSet::new();
let mut entries = fs::read_dir(&path).await?;
while let Some(entry) = entries.next_entry().await? {
let version = entry.file_name();
let path_relative = path_relative.join(&version);
if used_paths.contains(&path_relative) {
continue;
}
let path = entry.path();
tasks.spawn(async { fs::remove_dir_all(path).await });
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
remove_empty_dir(&path).await
});
}
fn packages_entry(
entry: fs::DirEntry,
tasks: &mut JoinSet<std::io::Result<()>>,
expected_aliases: &Arc<HashSet<Alias>>,
) {
let expected_aliases = expected_aliases.clone();
tasks.spawn(async move {
if !entry.file_type().await?.is_file() {
return Ok(());
}
let path = entry.path();
let name = path
.file_stem()
.unwrap()
.to_str()
.expect("non UTF-8 file name in packages folder");
let name = name.strip_suffix(".bin").unwrap_or(name);
let name = match name.parse::<Alias>() {
Ok(name) => name,
Err(e) => {
tracing::error!("invalid alias in packages folder: {e}");
return Ok(());
}
};
if !expected_aliases.contains(&name) {
fs::remove_file(path).await?;
}
Ok(())
});
}
fn scripts_entry(
entry: fs::DirEntry,
tasks: &mut JoinSet<std::io::Result<()>>,
expected_aliases: &Arc<HashSet<Alias>>,
) {
let expected_aliases = expected_aliases.clone();
tasks.spawn(async move {
if !entry.file_type().await?.is_dir() {
return Ok(());
}
let path = entry.path();
let name = path
.file_name()
.unwrap()
.to_str()
.expect("non UTF-8 file name in scripts folder");
let name = match name.parse::<Alias>() {
Ok(name) => name,
Err(e) => {
tracing::error!("invalid alias in scripts folder: {e}");
return Ok(());
}
};
if !expected_aliases.contains(&name) {
fs::remove_dir_all(&path).await?;
}
Ok(())
});
}
impl Project {
/// Removes unused packages from the project
pub async fn remove_unused(
&self,
graph: &DependencyGraphWithTarget,
) -> Result<(), errors::RemoveUnusedError> {
let manifest = self.deser_manifest().await?;
let used_paths = graph
.iter()
.map(|(id, node)| node.node.container_folder(id).parent().unwrap().to_path_buf())
.collect::<HashSet<_>>();
let used_paths = Arc::new(used_paths);
let mut tasks = all_packages_dirs()
.into_iter()
.map(|folder| {
let packages_dir = self.package_dir().join(&folder);
let packages_index_dir = packages_dir.join(PACKAGES_CONTAINER_NAME);
let used_paths = used_paths.clone();
let expected_aliases = graph
.iter()
.filter(|(id, _)| {
manifest
.target
.kind()
.packages_folder(id.version_id().target())
== folder
})
.filter_map(|(_, node)| {
node.node.direct.as_ref().map(|(alias, _, _)| alias.clone())
})
.collect::<HashSet<_>>();
let expected_aliases = Arc::new(expected_aliases);
async move {
let mut index_entries = match fs::read_dir(&packages_index_dir).await {
Ok(entries) => entries,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(()),
Err(e) => return Err(e),
};
// we don't handle NotFound here because the upper level will handle it
let mut packages_entries = fs::read_dir(&packages_dir).await?;
let mut tasks = JoinSet::new();
loop {
tokio::select! {
Some(entry) = index_entries.next_entry().map(Result::transpose) => {
index_entry(
entry?,
&packages_index_dir,
&mut tasks,
&used_paths,
);
}
Some(entry) = packages_entries.next_entry().map(Result::transpose) => {
packages_entry(
entry?,
&mut tasks,
&expected_aliases,
);
}
else => break,
}
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
remove_empty_dir(&packages_index_dir).await?;
remove_empty_dir(&packages_dir).await?;
Ok::<_, std::io::Error>(())
}
})
.collect::<JoinSet<_>>();
let scripts_dir = self.package_dir().join(SCRIPTS_LINK_FOLDER);
match fs::read_dir(&scripts_dir).await {
Ok(mut entries) => {
let expected_aliases = graph
.iter()
.filter_map(|(_, node)| {
node.node
.direct
.as_ref()
.map(|(alias, _, _)| alias.clone())
.filter(|_| node.target.scripts().is_some_and(|s| !s.is_empty()))
})
.collect::<HashSet<_>>();
let expected_aliases = Arc::new(expected_aliases);
while let Some(entry) = entries.next_entry().await? {
scripts_entry(entry, &mut tasks, &expected_aliases);
}
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
Err(e) => return Err(e.into()),
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
remove_empty_dir(&scripts_dir).await?;
Ok(())
}
}
/// Errors that can occur when using incremental installs
pub mod errors {
use thiserror::Error;
/// Errors that can occur when removing unused packages
#[derive(Debug, Error)]
pub enum RemoveUnusedError {
/// Reading the manifest failed
#[error("error reading manifest")]
ManifestRead(#[from] crate::errors::ManifestReadError),
/// IO error
#[error("IO error")]
Io(#[from] std::io::Error),
}
}
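
The `packages_entry` and `scripts_entry` passes reduce to one rule: an on-disk linker survives only if its name (minus a possible `.bin` suffix for binary linkers) still parses to an alias the graph expects. A self-contained sketch of that rule, with plain strings standing in for `Alias`:

```rust
use std::collections::HashSet;
use std::path::Path;

fn should_keep(path: &Path, expected: &HashSet<&str>) -> bool {
    let Some(stem) = path.file_stem().and_then(|s| s.to_str()) else {
        return false;
    };
    // `signal.bin.luau` has stem `signal.bin`; strip the marker suffix
    let name = stem.strip_suffix(".bin").unwrap_or(stem);
    expected.contains(name)
}

fn main() {
    let expected: HashSet<&str> = ["http", "signal"].into();
    assert!(should_keep(Path::new("luau_packages/http.luau"), &expected));
    assert!(should_keep(Path::new("luau_packages/signal.bin.luau"), &expected));
    // stale alias: the sweep above would delete this file
    assert!(!should_keep(Path::new("luau_packages/promise.luau"), &expected));
}
```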

src/linking/mod.rs

@@ -22,6 +22,8 @@ use tracing::{instrument, Instrument};
/// Generates linking modules for a project
pub mod generator;
/// Incremental installs
pub mod incremental;
async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
let p = path.as_ref();
@@ -32,13 +34,11 @@ async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
let hash = store_in_cas(cas_dir, contents.as_bytes()).await?;
match fs::remove_file(&destination).await {
Ok(_) => {}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
Err(e) => return Err(e),
};
fs::hard_link(cas_path(&hash, cas_dir), destination).await
match fs::hard_link(cas_path(&hash, cas_dir), destination).await {
Ok(_) => Ok(()),
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => Ok(()),
Err(e) => Err(e),
}
}
#[derive(Debug, Clone, Copy)]
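
For context on the `write_cas` hunk above: the old code unconditionally removed the destination before hard-linking it to the CAS file, while the new code attempts the link directly and treats `AlreadyExists` as success, saving a round of syscalls on incremental installs where the linker is already in place. A standalone sketch of the new idiom using `std::fs` (pesde itself goes through fs_err's tokio wrappers):

```rust
use std::path::Path;

fn link_from_cas(cas_file: &Path, destination: &Path) -> std::io::Result<()> {
    match std::fs::hard_link(cas_file, destination) {
        Ok(()) => Ok(()),
        // a link is already there: nothing to do
        Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => Ok(()),
        Err(e) => Err(e),
    }
}

fn main() -> std::io::Result<()> {
    let dir = std::env::temp_dir().join("cas_sketch");
    std::fs::create_dir_all(&dir)?;
    let cas_file = dir.join("content");
    std::fs::write(&cas_file, b"return {}")?;

    let destination = dir.join("alias.luau");
    link_from_cas(&cas_file, &destination)?;
    // second call hits the AlreadyExists arm and still succeeds
    link_from_cas(&cas_file, &destination)?;
    Ok(())
}
```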
@@ -68,14 +68,8 @@ impl Project {
// step 1. link all non-wally packages (and their dependencies) temporarily without types
// we do this separately to allow the required tools for the scripts to be installed
self.link(
&graph,
&manifest,
&Arc::new(PackageTypes::default()),
false,
false,
)
.await?;
self.link(&graph, &manifest, &Arc::new(PackageTypes::default()), false)
.await?;
if !with_types {
return Ok(());
@@ -156,44 +150,32 @@ impl Project {
}
// step 3. link all packages (and their dependencies), this time with types
self.link(&graph, &manifest, &Arc::new(package_types), true, false)
self.link(&graph, &manifest, &Arc::new(package_types), true)
.await
}
#[allow(clippy::too_many_arguments)]
async fn link_files(
async fn link(
&self,
base_folder: &Path,
container_folder: &Path,
root_container_folder: &Path,
relative_container_folder: &Path,
node: &DependencyGraphNodeWithTarget,
package_id: &PackageId,
alias: &Alias,
package_types: &Arc<PackageTypes>,
graph: &Arc<DependencyGraphWithTarget>,
manifest: &Arc<Manifest>,
remove: bool,
is_root: bool,
package_types: &Arc<PackageTypes>,
is_complete: bool,
) -> Result<(), errors::LinkingError> {
static NO_TYPES: Vec<String> = Vec::new();
let mut tasks = JoinSet::<Result<_, errors::LinkingError>>::new();
let mut link_files = |base_folder: &Path,
container_folder: &Path,
root_container_folder: &Path,
relative_container_folder: &Path,
node: &DependencyGraphNodeWithTarget,
package_id: &PackageId,
alias: &Alias,
is_root: bool|
-> Result<(), errors::LinkingError> {
static NO_TYPES: Vec<String> = Vec::new();
#[allow(clippy::result_large_err)]
fn into_link_result(res: std::io::Result<()>) -> Result<(), errors::LinkingError> {
match res {
Ok(_) => Ok(()),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
Err(e) => Err(e.into()),
}
}
if let Some(lib_file) = node.target.lib_path() {
let destination = base_folder.join(format!("{alias}.luau"));
let mut tasks = JoinSet::<Result<(), errors::LinkingError>>::new();
if let Some(lib_file) = node.target.lib_path() {
let destination = base_folder.join(format!("{alias}.luau"));
if remove {
tasks.spawn(async move { into_link_result(fs::remove_file(destination).await) });
} else {
let lib_module = generator::generate_lib_linking_module(
&generator::get_lib_require_path(
node.target.kind(),
@@ -215,14 +197,10 @@ impl Project {
.map_err(Into::into)
});
}
}
if let Some(bin_file) = node.target.bin_path() {
let destination = base_folder.join(format!("{alias}.bin.luau"));
if let Some(bin_file) = node.target.bin_path() {
let destination = base_folder.join(format!("{alias}.bin.luau"));
if remove {
tasks.spawn(async move { into_link_result(fs::remove_file(destination).await) });
} else {
let bin_module = generator::generate_bin_linking_module(
container_folder,
&generator::get_bin_require_path(base_folder, bin_file, container_folder),
@@ -235,30 +213,17 @@ impl Project {
.map_err(Into::into)
});
}
}
if let Some(scripts) = node
.target
.scripts()
.filter(|s| !s.is_empty() && node.node.direct.is_some() && is_root)
{
let scripts_container = self.package_dir().join(SCRIPTS_LINK_FOLDER);
let scripts_base =
create_and_canonicalize(scripts_container.join(alias.as_str())).await?;
if let Some(scripts) = node
.target
.scripts()
.filter(|s| !s.is_empty() && node.node.direct.is_some() && is_root)
{
let scripts_base = self
.package_dir()
.join(SCRIPTS_LINK_FOLDER)
.join(alias.as_str());
if remove {
tasks.spawn(async move {
into_link_result(fs::remove_dir_all(scripts_base).await)?;
// remove the scripts container if it's empty
match fs::remove_dir(scripts_container).await {
Ok(_) => {}
Err(e) if e.kind() == std::io::ErrorKind::DirectoryNotEmpty => {}
r => return into_link_result(r),
}
Ok(())
});
} else {
for (script_name, script_path) in scripts {
let destination = scripts_base.join(format!("{script_name}.luau"));
let script_module = generator::generate_script_linking_module(
@@ -271,134 +236,145 @@ impl Project {
let cas_dir = self.cas_dir().to_path_buf();
tasks.spawn(async move {
fs::create_dir_all(destination.parent().unwrap()).await?;
write_cas(destination, &cas_dir, &script_module)
.await
.map_err(Into::into)
});
}
}
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
Ok(())
};
Ok(())
}
pub(crate) async fn link(
&self,
graph: &Arc<DependencyGraphWithTarget>,
manifest: &Arc<Manifest>,
package_types: &Arc<PackageTypes>,
is_complete: bool,
remove: bool,
) -> Result<(), errors::LinkingError> {
let mut tasks = graph
let mut node_tasks = graph
.iter()
.map(|(package_id, node)| {
let graph = graph.clone();
let manifest = manifest.clone();
let package_types = package_types.clone();
.map(|(id, node)| {
let base_folder = self.package_dir().join(
manifest
.target
.kind()
.packages_folder(id.version_id().target()),
);
let span = tracing::info_span!("link", package_id = package_id.to_string());
let package_id = package_id.clone();
let id = id.clone();
let node = node.clone();
let project = self.clone();
async move {
Ok::<_, errors::LinkingError>((
id,
node,
create_and_canonicalize(base_folder).await?,
))
}
})
.collect::<JoinSet<_>>();
let mut dependency_tasks = JoinSet::<Result<_, errors::LinkingError>>::new();
loop {
tokio::select! {
Some(res) = node_tasks.join_next() => {
let (package_id, node, base_folder) = res.unwrap()?;
let (node_container_folder, node_packages_folder) = {
let base_folder = create_and_canonicalize(
project.package_dir().join(
manifest
.target
.kind()
.packages_folder(package_id.version_id().target()),
),
)
.await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
let container_folder = packages_container_folder
.join(node.node.container_folder(&package_id).path());
let container_folder =
packages_container_folder.join(node.node.container_folder(&package_id));
if let Some((alias, _, _)) = &node.node.direct {
project
.link_files(
&base_folder,
&container_folder,
&base_folder,
container_folder.strip_prefix(&base_folder).unwrap(),
&node,
&package_id,
alias,
&package_types,
&manifest,
remove,
true,
)
.await?;
link_files(
&base_folder,
&container_folder,
&base_folder,
container_folder.strip_prefix(&base_folder).unwrap(),
&node,
&package_id,
alias,
true,
)?;
}
(container_folder, base_folder)
};
for (dependency_id, dependency_alias) in &node.node.dependencies {
let Some(dependency_node) = graph.get(dependency_id) else {
if is_complete {
return Err(errors::LinkingError::DependencyNotFound(
dependency_id.to_string(),
package_id.to_string(),
));
}
for (dep_id, dep_alias) in &node.node.dependencies {
let dep_id = dep_id.clone();
let dep_alias = dep_alias.clone();
let graph = graph.clone();
let node = node.clone();
let package_id = package_id.clone();
let node_container_folder = node_container_folder.clone();
let node_packages_folder = node_packages_folder.clone();
let package_dir = self.package_dir().to_path_buf();
continue;
};
dependency_tasks.spawn(async move {
let Some(dep_node) = graph.get(&dep_id) else {
return if is_complete {
Err(errors::LinkingError::DependencyNotFound(
dep_id.to_string(),
package_id.to_string(),
))
} else {
Ok(None)
};
};
let base_folder = create_and_canonicalize(
project.package_dir().join(
let base_folder = package_dir.join(
package_id
.version_id()
.target()
.packages_folder(dependency_id.version_id().target()),
),
)
.await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
let container_folder = packages_container_folder
.join(dependency_node.node.container_folder(dependency_id).path());
let linker_folder = create_and_canonicalize(node_container_folder.join(
node.node.dependencies_dir(
.packages_folder(dep_id.version_id().target()),
);
let linker_folder = node_container_folder.join(node.node.dependencies_dir(
package_id.version_id(),
dependency_node.target.kind(),
),
))
.await?;
dep_id.version_id().target(),
));
project
.link_files(
&linker_folder,
&container_folder,
&node_packages_folder,
container_folder.strip_prefix(&base_folder).unwrap(),
dependency_node,
dependency_id,
dependency_alias,
&package_types,
&manifest,
remove,
false,
)
.await?;
Ok(Some((
dep_node.clone(),
dep_id,
dep_alias,
create_and_canonicalize(base_folder).await?,
create_and_canonicalize(linker_folder).await?,
node_packages_folder,
)))
});
}
},
Some(res) = dependency_tasks.join_next() => {
let Some((
dependency_node,
dependency_id,
dependency_alias,
base_folder,
linker_folder,
node_packages_folder,
)) = res.unwrap()?
else {
continue;
};
Ok(())
}
.instrument(span)
})
.collect::<JoinSet<_>>();
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
let container_folder = packages_container_folder
.join(dependency_node.node.container_folder(&dependency_id));
link_files(
&linker_folder,
&container_folder,
&node_packages_folder,
container_folder.strip_prefix(&base_folder).unwrap(),
&dependency_node,
&dependency_id,
&dependency_alias,
false,
)?;
},
else => break,
}
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;