feat: improve linking process

This commit is contained in:
daimond113 2025-01-02 21:30:25 +01:00
parent e5b629e0c5
commit 6f5e2a2473
No known key found for this signature in database
GPG key ID: 3A8ECE51328B513C
10 changed files with 214 additions and 245 deletions

View file

@ -17,6 +17,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed
- Change handling of graphs to a flat structure by @daimond113
- Store dependency graphs instead of downloaded graphs in the lockfile by @daimond113
- Improve linking process by @daimond113
### Removed
- Remove old includes format compatibility by @daimond113

View file

@ -203,8 +203,7 @@ impl ExecuteCommand {
DownloadAndLinkOptions::<CliReporter<Stderr>, ()>::new(reqwest)
.reporter(reporter)
.refreshed_sources(refreshed_sources)
.prod(true)
.write(true),
.prod(true),
)
.await
.context("failed to download and link dependencies")?;

View file

@ -5,6 +5,7 @@ use std::{
time::Instant,
};
use super::files::make_executable;
use crate::cli::{
bin_dir,
reporters::{self, CliReporter},
@ -15,15 +16,13 @@ use colored::Colorize;
use fs_err::tokio as fs;
use pesde::{
download_and_link::{DownloadAndLinkHooks, DownloadAndLinkOptions},
graph::{DependencyGraph, DownloadedGraph},
graph::{DependencyGraph, DependencyGraphWithTarget},
lockfile::Lockfile,
manifest::{target::TargetKind, DependencyType},
Project, RefreshedSources, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
};
use tokio::task::JoinSet;
use super::files::make_executable;
fn bin_link_file(alias: &str) -> String {
let mut all_combinations = BTreeSet::new();
@ -63,11 +62,11 @@ impl DownloadAndLinkHooks for InstallHooks {
async fn on_bins_downloaded(
&self,
downloaded_graph: &DownloadedGraph,
graph: &DependencyGraphWithTarget,
) -> Result<(), Self::Error> {
let mut tasks = downloaded_graph
let mut tasks = graph
.values()
.filter(|node| node.target.as_ref().is_some_and(|t| t.bin_path().is_some()))
.filter(|node| node.target.bin_path().is_some())
.filter_map(|node| node.node.direct.as_ref())
.map(|(alias, _, _)| alias)
.filter(|alias| {
@ -249,6 +248,7 @@ pub async fn install(
.context("failed to build dependency graph")?;
let graph = Arc::new(graph);
if options.write {
root_progress.reset();
root_progress.set_length(0);
root_progress.set_message("download");
@ -272,29 +272,23 @@ pub async fn install(
.hooks(hooks)
.refreshed_sources(refreshed_sources)
.prod(options.prod)
.write(options.write)
.network_concurrency(options.network_concurrency),
)
.await
.context("failed to download and link dependencies")?;
#[cfg(feature = "patches")]
if options.write {
use pesde::{download_and_link::filter_graph, graph::ConvertableGraph};
{
use pesde::graph::ConvertableGraph;
root_progress.reset();
root_progress.set_length(0);
root_progress.set_message("patch");
project
.apply_patches(
&Arc::into_inner(filter_graph(&downloaded_graph, options.prod))
.unwrap()
.convert(),
reporter,
)
.apply_patches(&downloaded_graph.convert(), reporter)
.await?;
}
}
root_progress.set_message("finish");

View file

@ -1,15 +1,14 @@
use crate::{
graph::{DependencyGraph, DownloadedDependencyGraphNode},
manifest::DependencyType,
graph::{DependencyGraph, DependencyGraphNode},
reporters::{DownloadProgressReporter, DownloadsReporter},
source::{
fs::PackageFs,
ids::PackageId,
traits::{DownloadOptions, GetTargetOptions, PackageRef, PackageSource, RefreshOptions},
traits::{DownloadOptions, PackageRef, PackageSource, RefreshOptions},
},
Project, RefreshedSources, PACKAGES_CONTAINER_NAME,
Project, RefreshedSources,
};
use async_stream::try_stream;
use fs_err::tokio as fs;
use futures::Stream;
use std::{num::NonZeroUsize, sync::Arc};
use tokio::{sync::Semaphore, task::JoinSet};
@ -24,12 +23,6 @@ pub(crate) struct DownloadGraphOptions<Reporter> {
pub reporter: Option<Arc<Reporter>>,
/// The refreshed sources.
pub refreshed_sources: RefreshedSources,
/// Whether to skip dev dependencies.
pub prod: bool,
/// Whether to write the downloaded packages to disk.
pub write: bool,
/// Whether to download Wally packages.
pub wally: bool,
/// The max number of concurrent network requests.
pub network_concurrency: NonZeroUsize,
}
@ -44,9 +37,6 @@ where
reqwest,
reporter: None,
refreshed_sources: Default::default(),
prod: false,
write: false,
wally: false,
network_concurrency: NonZeroUsize::new(16).unwrap(),
}
}
@ -63,24 +53,6 @@ where
self
}
/// Sets whether to skip dev dependencies.
pub(crate) fn prod(mut self, prod: bool) -> Self {
self.prod = prod;
self
}
/// Sets whether to write the downloaded packages to disk.
pub(crate) fn write(mut self, write: bool) -> Self {
self.write = write;
self
}
/// Sets whether to download Wally packages.
pub(crate) fn wally(mut self, wally: bool) -> Self {
self.wally = wally;
self
}
/// Sets the max number of concurrent network requests.
pub(crate) fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
self.network_concurrency = network_concurrency;
@ -94,9 +66,6 @@ impl<Reporter> Clone for DownloadGraphOptions<Reporter> {
reqwest: self.reqwest.clone(),
reporter: self.reporter.clone(),
refreshed_sources: self.refreshed_sources.clone(),
prod: self.prod,
write: self.write,
wally: self.wally,
network_concurrency: self.network_concurrency,
}
}
@ -104,14 +73,14 @@ impl<Reporter> Clone for DownloadGraphOptions<Reporter> {
impl Project {
/// Downloads a graph of dependencies.
#[instrument(skip_all, fields(prod = options.prod, wally = options.wally, write = options.write), level = "debug")]
#[instrument(skip_all, level = "debug")]
pub(crate) async fn download_graph<Reporter>(
&self,
graph: &DependencyGraph,
options: DownloadGraphOptions<Reporter>,
) -> Result<
impl Stream<
Item = Result<(DownloadedDependencyGraphNode, PackageId), errors::DownloadGraphError>,
Item = Result<(PackageId, DependencyGraphNode, PackageFs), errors::DownloadGraphError>,
>,
errors::DownloadGraphError,
>
@ -122,21 +91,13 @@ impl Project {
reqwest,
reporter,
refreshed_sources,
prod,
write,
wally,
network_concurrency,
} = options;
let manifest = self.deser_manifest().await?;
let manifest_target_kind = manifest.target.kind();
let semaphore = Arc::new(Semaphore::new(network_concurrency.get()));
let mut tasks = graph
.iter()
// we need to download pesde packages first, since scripts (for target finding for example) can depend on them
.filter(|(_, node)| node.pkg_ref.like_wally() == wally)
.map(|(package_id, node)| {
let span = tracing::info_span!("download", package_id = package_id.to_string());
@ -144,7 +105,6 @@ impl Project {
let reqwest = reqwest.clone();
let reporter = reporter.clone();
let refreshed_sources = refreshed_sources.clone();
let package_dir = project.package_dir().to_path_buf();
let semaphore = semaphore.clone();
let package_id = Arc::new(package_id.clone());
let node = node.clone();
@ -170,15 +130,6 @@ impl Project {
)
.await?;
let container_folder = package_dir
.join(
manifest_target_kind.packages_folder(package_id.version_id().target()),
)
.join(PACKAGES_CONTAINER_NAME)
.join(node.container_folder(&package_id));
fs::create_dir_all(&container_folder).await?;
tracing::debug!("downloading");
let fs = match progress_reporter {
@ -213,33 +164,7 @@ impl Project {
tracing::debug!("downloaded");
let mut target = None;
if write {
if !prod || node.resolved_ty != DependencyType::Dev {
fs.write_to(&container_folder, project.cas_dir(), true)
.await?;
target = Some(
source
.get_target(
&node.pkg_ref,
&GetTargetOptions {
project,
path: Arc::from(container_folder),
id: package_id.clone(),
},
)
.await
.map_err(Box::new)?,
);
} else {
tracing::debug!("skipping write to disk, dev dependency in prod mode");
}
}
let downloaded_node = DownloadedDependencyGraphNode { node, target };
Ok((downloaded_node, Arc::into_inner(package_id).unwrap()))
Ok((Arc::into_inner(package_id).unwrap(), node, fs))
}
.instrument(span)
})
@ -263,10 +188,6 @@ pub mod errors {
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DownloadGraphError {
/// An error occurred deserializing the project manifest
#[error("error deserializing project manifest")]
ManifestDeserializationFailed(#[from] crate::errors::ManifestReadError),
/// An error occurred refreshing a package source
#[error("failed to refresh package source")]
RefreshFailed(#[from] crate::source::errors::RefreshError),
@ -278,9 +199,5 @@ pub mod errors {
/// Error downloading a package
#[error("failed to download package")]
DownloadFailed(#[from] Box<crate::source::errors::DownloadError>),
/// Error getting target
#[error("failed to get target")]
GetTargetFailed(#[from] Box<crate::source::errors::GetTargetError>),
}
}

View file

@ -1,34 +1,30 @@
use crate::{
download::DownloadGraphOptions,
graph::{DependencyGraph, DownloadedGraph},
graph::{
DependencyGraph, DependencyGraphNode, DependencyGraphNodeWithTarget,
DependencyGraphWithTarget,
},
manifest::DependencyType,
reporters::DownloadsReporter,
Project, RefreshedSources,
source::{
ids::PackageId,
traits::{GetTargetOptions, PackageRef, PackageSource},
},
Project, RefreshedSources, PACKAGES_CONTAINER_NAME,
};
use fs_err::tokio as fs;
use futures::TryStreamExt;
use std::{
collections::{BTreeMap, HashMap},
convert::Infallible,
future::{self, Future},
num::NonZeroUsize,
path::PathBuf,
sync::Arc,
};
use tokio::{pin, task::JoinSet};
use tracing::{instrument, Instrument};
/// Filters a graph to only include production dependencies, if `prod` is `true`
pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> Arc<DownloadedGraph> {
if !prod {
return Arc::new(graph.clone());
}
Arc::new(
graph
.iter()
.filter(|(_, node)| node.node.resolved_ty != DependencyType::Dev)
.map(|(id, node)| (id.clone(), node.clone()))
.collect(),
)
}
/// Hooks to perform actions after certain events during download and linking.
#[allow(unused_variables)]
pub trait DownloadAndLinkHooks {
@ -39,7 +35,7 @@ pub trait DownloadAndLinkHooks {
/// contains all downloaded packages.
fn on_scripts_downloaded(
&self,
downloaded_graph: &DownloadedGraph,
graph: &DependencyGraphWithTarget,
) -> impl Future<Output = Result<(), Self::Error>> + Send {
future::ready(Ok(()))
}
@ -48,7 +44,7 @@ pub trait DownloadAndLinkHooks {
/// `downloaded_graph` contains all downloaded packages.
fn on_bins_downloaded(
&self,
downloaded_graph: &DownloadedGraph,
graph: &DependencyGraphWithTarget,
) -> impl Future<Output = Result<(), Self::Error>> + Send {
future::ready(Ok(()))
}
@ -57,7 +53,7 @@ pub trait DownloadAndLinkHooks {
/// `downloaded_graph` contains all downloaded packages.
fn on_all_downloaded(
&self,
downloaded_graph: &DownloadedGraph,
graph: &DependencyGraphWithTarget,
) -> impl Future<Output = Result<(), Self::Error>> + Send {
future::ready(Ok(()))
}
@ -80,8 +76,6 @@ pub struct DownloadAndLinkOptions<Reporter = (), Hooks = ()> {
pub refreshed_sources: RefreshedSources,
/// Whether to skip dev dependencies.
pub prod: bool,
/// Whether to write the downloaded packages to disk.
pub write: bool,
/// The max number of concurrent network requests.
pub network_concurrency: NonZeroUsize,
}
@ -99,7 +93,6 @@ where
hooks: None,
refreshed_sources: Default::default(),
prod: false,
write: true,
network_concurrency: NonZeroUsize::new(16).unwrap(),
}
}
@ -128,12 +121,6 @@ where
self
}
/// Sets whether to write the downloaded packages to disk.
pub fn write(mut self, write: bool) -> Self {
self.write = write;
self
}
/// Sets the max number of concurrent network requests.
pub fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
self.network_concurrency = network_concurrency;
@ -149,7 +136,6 @@ impl Clone for DownloadAndLinkOptions {
hooks: self.hooks.clone(),
refreshed_sources: self.refreshed_sources.clone(),
prod: self.prod,
write: self.write,
network_concurrency: self.network_concurrency,
}
}
@ -157,12 +143,12 @@ impl Clone for DownloadAndLinkOptions {
impl Project {
/// Downloads a graph of dependencies and links them in the correct order
#[instrument(skip_all, fields(prod = options.prod, write = options.write), level = "debug")]
#[instrument(skip_all, fields(prod = options.prod), level = "debug")]
pub async fn download_and_link<Reporter, Hooks>(
&self,
graph: &Arc<DependencyGraph>,
options: DownloadAndLinkOptions<Reporter, Hooks>,
) -> Result<DownloadedGraph, errors::DownloadAndLinkError<Hooks::Error>>
) -> Result<DependencyGraphWithTarget, errors::DownloadAndLinkError<Hooks::Error>>
where
Reporter: for<'a> DownloadsReporter<'a> + 'static,
Hooks: DownloadAndLinkHooks + 'static,
@ -173,81 +159,151 @@ impl Project {
hooks,
refreshed_sources,
prod,
write,
network_concurrency,
} = options;
let graph = graph.clone();
let reqwest = reqwest.clone();
let manifest = self.deser_manifest().await?;
let mut downloaded_graph = DownloadedGraph::new();
// step 1. download dependencies
let downloaded_graph = {
let mut downloaded_graph = BTreeMap::new();
let mut download_graph_options = DownloadGraphOptions::<Reporter>::new(reqwest.clone())
.refreshed_sources(refreshed_sources.clone())
.prod(prod)
.write(write)
.network_concurrency(network_concurrency);
if let Some(reporter) = reporter {
download_graph_options = download_graph_options.reporter(reporter.clone());
}
// step 1. download pesde dependencies
self.download_graph(&graph, download_graph_options.clone())
.instrument(tracing::debug_span!("download (pesde)"))
.await?
.try_for_each(|(downloaded_node, id)| {
downloaded_graph.insert(id, downloaded_node);
future::ready(Ok(()))
})
let downloaded = self
.download_graph(&graph, download_graph_options.clone())
.instrument(tracing::debug_span!("download"))
.await?;
pin!(downloaded);
// step 2. link pesde dependencies. do so without types
if write {
self.link_dependencies(filter_graph(&downloaded_graph, prod), false)
.instrument(tracing::debug_span!("link (pesde)"))
.await?;
let mut tasks = JoinSet::new();
while let Some((id, node, fs)) = downloaded.try_next().await? {
let container_folder = self
.package_dir()
.join(
manifest
.target
.kind()
.packages_folder(id.version_id().target()),
)
.join(PACKAGES_CONTAINER_NAME)
.join(node.container_folder(&id));
if prod && node.resolved_ty == DependencyType::Dev {
continue;
}
if let Some(ref hooks) = hooks {
downloaded_graph.insert(id, (node, container_folder.clone()));
let cas_dir = self.cas_dir().to_path_buf();
tasks.spawn(async move {
fs::create_dir_all(&container_folder).await?;
fs.write_to(container_folder, cas_dir, true).await
});
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
downloaded_graph
};
let (downloaded_wally_graph, downloaded_other_graph) = downloaded_graph
.into_iter()
.partition::<HashMap<_, _>, _>(|(_, (node, _))| node.pkg_ref.is_wally_package());
let mut graph = Arc::new(DependencyGraphWithTarget::new());
async fn get_graph_targets<Hooks: DownloadAndLinkHooks>(
graph: &mut Arc<DependencyGraphWithTarget>,
project: &Project,
downloaded_graph: HashMap<PackageId, (DependencyGraphNode, PathBuf)>,
) -> Result<(), errors::DownloadAndLinkError<Hooks::Error>> {
let mut tasks = downloaded_graph
.into_iter()
.map(|(id, (node, container_folder))| {
let source = node.pkg_ref.source();
let path = Arc::from(container_folder.as_path());
let id = Arc::new(id.clone());
let project = project.clone();
async move {
let target = source
.get_target(
&node.pkg_ref,
&GetTargetOptions {
project,
path,
id: id.clone(),
},
)
.await?;
Ok::<_, errors::DownloadAndLinkError<Hooks::Error>>((
Arc::into_inner(id).unwrap(),
DependencyGraphNodeWithTarget { node, target },
))
}
})
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
let (id, node) = task.unwrap()?;
Arc::get_mut(graph).unwrap().insert(id, node);
}
Ok(())
}
// step 2. get targets for non Wally packages (Wally packages require the scripts packages to be downloaded first)
get_graph_targets::<Hooks>(&mut graph, self, downloaded_other_graph)
.instrument(tracing::debug_span!("get targets (non-wally)"))
.await?;
self.link_dependencies(graph.clone(), false)
.instrument(tracing::debug_span!("link (non-wally)"))
.await?;
if let Some(hooks) = &hooks {
hooks
.on_scripts_downloaded(&downloaded_graph)
.on_scripts_downloaded(&graph)
.await
.map_err(errors::DownloadAndLinkError::Hook)?;
hooks
.on_bins_downloaded(&downloaded_graph)
.on_bins_downloaded(&graph)
.await
.map_err(errors::DownloadAndLinkError::Hook)?;
}
// step 3. download wally dependencies
self.download_graph(&graph, download_graph_options.clone().wally(true))
.instrument(tracing::debug_span!("download (wally)"))
.await?
.try_for_each(|(downloaded_node, id)| {
downloaded_graph.insert(id, downloaded_node);
future::ready(Ok(()))
})
// step 3. get targets for Wally packages
get_graph_targets::<Hooks>(&mut graph, self, downloaded_wally_graph)
.instrument(tracing::debug_span!("get targets (wally)"))
.await?;
// step 4. link ALL dependencies. do so with types
if write {
self.link_dependencies(filter_graph(&downloaded_graph, prod), true)
self.link_dependencies(graph.clone(), true)
.instrument(tracing::debug_span!("link (all)"))
.await?;
}
if let Some(ref hooks) = hooks {
if let Some(hooks) = &hooks {
hooks
.on_all_downloaded(&downloaded_graph)
.on_all_downloaded(&graph)
.await
.map_err(errors::DownloadAndLinkError::Hook)?;
}
Ok(downloaded_graph)
Ok(Arc::into_inner(graph).unwrap())
}
}
@ -259,6 +315,10 @@ pub mod errors {
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DownloadAndLinkError<E> {
/// Reading the manifest failed
#[error("error reading manifest")]
ManifestRead(#[from] crate::errors::ManifestReadError),
/// An error occurred while downloading the graph
#[error("error downloading graph")]
DownloadGraph(#[from] crate::download::errors::DownloadGraphError),
@ -270,5 +330,13 @@ pub mod errors {
/// An error occurred while executing the pesde callback
#[error("error executing hook")]
Hook(#[source] E),
/// IO error
#[error("io error")]
Io(#[from] std::io::Error),
/// Error getting a target
#[error("error getting target")]
GetTarget(#[from] crate::source::errors::GetTargetError),
}
}

View file

@ -47,7 +47,7 @@ impl DependencyGraphNode {
pub fn container_folder(&self, package_id: &PackageId) -> PathBuf {
let (name, version) = package_id.parts();
if self.pkg_ref.like_wally() {
if self.pkg_ref.is_wally_package() {
return PathBuf::from(format!(
"{}_{}@{}",
name.as_str().0,
@ -66,18 +66,17 @@ impl DependencyGraphNode {
/// A graph of `DependencyGraphNode`s
pub type DependencyGraph = Graph<DependencyGraphNode>;
/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
/// A dependency graph node with a `Target`
#[derive(Debug, Clone)]
pub struct DownloadedDependencyGraphNode {
pub struct DependencyGraphNodeWithTarget {
/// The target of the package
/// None only if download was called with write = false or is a dev dependency in a prod install
pub target: Option<Target>,
pub target: Target,
/// The node
pub node: DependencyGraphNode,
}
/// A graph of `DownloadedDependencyGraphNode`s
pub type DownloadedGraph = Graph<DownloadedDependencyGraphNode>;
pub type DependencyGraphWithTarget = Graph<DependencyGraphNodeWithTarget>;
/// A trait for converting a graph to a different type of graph
pub trait ConvertableGraph<Node> {
@ -85,7 +84,7 @@ pub trait ConvertableGraph<Node> {
fn convert(self) -> Graph<Node>;
}
impl ConvertableGraph<DependencyGraphNode> for DownloadedGraph {
impl ConvertableGraph<DependencyGraphNode> for DependencyGraphWithTarget {
fn convert(self) -> Graph<DependencyGraphNode> {
self.into_iter().map(|(id, node)| (id, node.node)).collect()
}

View file

@ -118,7 +118,7 @@ fn luau_style_path(path: &Path) -> String {
/// Get the require path for a library
#[allow(clippy::too_many_arguments)]
pub fn get_lib_require_path(
target: &TargetKind,
target: TargetKind,
base_dir: &Path,
lib_file: &RelativePath,
destination_dir: &Path,

View file

@ -1,5 +1,5 @@
use crate::{
graph::{DownloadedDependencyGraphNode, DownloadedGraph},
graph::{DependencyGraphNodeWithTarget, DependencyGraphWithTarget},
linking::generator::get_file_types,
manifest::Manifest,
scripts::{execute_script, ExecuteScriptHooks, ScriptName},
@ -59,7 +59,7 @@ impl Project {
#[instrument(skip(self, graph), level = "debug")]
pub(crate) async fn link_dependencies(
&self,
graph: Arc<DownloadedGraph>,
graph: Arc<DependencyGraphWithTarget>,
with_types: bool,
) -> Result<(), errors::LinkingError> {
let manifest = self.deser_manifest().await?;
@ -87,7 +87,7 @@ impl Project {
let project = self.clone();
async move {
let Some(lib_file) = node.target.as_ref().and_then(|t| t.lib_path()) else {
let Some(lib_file) = node.target.lib_path() else {
return Ok((package_id, vec![]));
};
@ -123,10 +123,8 @@ impl Project {
vec![]
};
if let Some(build_files) = node
.target
.as_ref()
.filter(|_| !node.node.pkg_ref.like_wally())
if let Some(build_files) = Some(&node.target)
.filter(|_| !node.node.pkg_ref.is_wally_package())
.and_then(|t| t.build_files())
{
execute_script(
@ -165,7 +163,7 @@ impl Project {
container_folder: &Path,
root_container_folder: &Path,
relative_container_folder: &Path,
node: &DownloadedDependencyGraphNode,
node: &DependencyGraphNodeWithTarget,
package_id: &PackageId,
alias: &str,
package_types: &PackageTypes,
@ -173,14 +171,10 @@ impl Project {
) -> Result<(), errors::LinkingError> {
static NO_TYPES: Vec<String> = Vec::new();
let Some(target) = &node.target else {
return Ok(());
};
if let Some(lib_file) = target.lib_path() {
if let Some(lib_file) = node.target.lib_path() {
let lib_module = generator::generate_lib_linking_module(
&generator::get_lib_require_path(
&target.kind(),
node.target.kind(),
base_folder,
lib_file,
container_folder,
@ -200,7 +194,7 @@ impl Project {
.await?;
}
if let Some(bin_file) = target.bin_path() {
if let Some(bin_file) = node.target.bin_path() {
let bin_module = generator::generate_bin_linking_module(
container_folder,
&generator::get_bin_require_path(base_folder, bin_file, container_folder),
@ -214,7 +208,7 @@ impl Project {
.await?;
}
if let Some(scripts) = target.scripts().filter(|s| !s.is_empty()) {
if let Some(scripts) = node.target.scripts().filter(|s| !s.is_empty()) {
let scripts_base =
create_and_canonicalize(self.package_dir().join(SCRIPTS_LINK_FOLDER).join(alias))
.await?;
@ -241,7 +235,7 @@ impl Project {
async fn link(
&self,
graph: &Arc<DownloadedGraph>,
graph: &Arc<DependencyGraphWithTarget>,
manifest: &Arc<Manifest>,
package_types: &Arc<PackageTypes>,
is_complete: bool,
@ -322,10 +316,7 @@ impl Project {
let linker_folder = create_and_canonicalize(node_container_folder.join(
node.node.base_folder(
package_id.version_id(),
match &dependency_node.target {
Some(t) => t.kind(),
None => continue,
},
dependency_node.target.kind(),
),
))
.await?;

View file

@ -209,7 +209,7 @@ pub enum RobloxPlaceKind {
Server,
}
impl TryInto<RobloxPlaceKind> for &TargetKind {
impl TryInto<RobloxPlaceKind> for TargetKind {
type Error = ();
fn try_into(self) -> Result<RobloxPlaceKind, Self::Error> {

View file

@ -24,7 +24,7 @@ pub enum PackageRefs {
impl PackageRefs {
/// Returns whether this package reference should be treated as a Wally package
pub fn like_wally(&self) -> bool {
pub fn is_wally_package(&self) -> bool {
match self {
#[cfg(feature = "wally-compat")]
PackageRefs::Wally(_) => true,