feat: switch to flat graph handling
Some checks are pending
Debug / Get build version (push) Waiting to run
Debug / Build for linux-x86_64 (push) Blocked by required conditions
Debug / Build for macos-aarch64 (push) Blocked by required conditions
Debug / Build for macos-x86_64 (push) Blocked by required conditions
Debug / Build for windows-x86_64 (push) Blocked by required conditions
Test & Lint / lint (push) Waiting to run

This commit is contained in:
daimond113 2025-01-01 00:34:21 +01:00
parent 80b8b151d7
commit 6a8dfe0ba3
No known key found for this signature in database
GPG key ID: 3A8ECE51328B513C
25 changed files with 644 additions and 537 deletions

View file

@ -14,6 +14,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Inherit pesde-managed scripts from workspace root by @daimond113 - Inherit pesde-managed scripts from workspace root by @daimond113
- Allow using binaries from workspace root in member packages by @daimond113 - Allow using binaries from workspace root in member packages by @daimond113
### Changed
- Change handling of graphs to a flat structure by @daimond113
### Removed ### Removed
- Remove old includes format compatibility by @daimond113 - Remove old includes format compatibility by @daimond113
- Remove data redundancy for workspace package references by @daimond113 - Remove data redundancy for workspace package references by @daimond113

View file

@ -16,10 +16,10 @@ use pesde::{
manifest::Manifest, manifest::Manifest,
source::{ source::{
git_index::{read_file, root_tree, GitBasedSource}, git_index::{read_file, root_tree, GitBasedSource},
ids::VersionId,
pesde::{DocEntry, DocEntryKind, IndexFile, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE}, pesde::{DocEntry, DocEntryKind, IndexFile, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
specifiers::DependencySpecifiers, specifiers::DependencySpecifiers,
traits::RefreshOptions, traits::RefreshOptions,
version_id::VersionId,
IGNORED_DIRS, IGNORED_FILES, IGNORED_DIRS, IGNORED_FILES,
}, },
MANIFEST_FILE_NAME, MANIFEST_FILE_NAME,

View file

@ -4,7 +4,7 @@ use actix_web::{
HttpResponse, HttpResponse,
}; };
use fs_err::tokio as fs; use fs_err::tokio as fs;
use pesde::{names::PackageName, source::version_id::VersionId}; use pesde::{names::PackageName, source::ids::VersionId};
use std::{ use std::{
fmt::Display, fmt::Display,
path::{Path, PathBuf}, path::{Path, PathBuf},

View file

@ -1,6 +1,6 @@
use crate::{benv, error::Error, make_reqwest}; use crate::{benv, error::Error, make_reqwest};
use actix_web::HttpResponse; use actix_web::HttpResponse;
use pesde::{names::PackageName, source::version_id::VersionId}; use pesde::{names::PackageName, source::ids::VersionId};
use rusty_s3::{Bucket, Credentials, UrlStyle}; use rusty_s3::{Bucket, Credentials, UrlStyle};
use std::fmt::Display; use std::fmt::Display;

View file

@ -3,7 +3,7 @@ use crate::{
storage::StorageImpl, storage::StorageImpl,
}; };
use actix_web::{http::header::LOCATION, HttpResponse}; use actix_web::{http::header::LOCATION, HttpResponse};
use pesde::{names::PackageName, source::version_id::VersionId}; use pesde::{names::PackageName, source::ids::VersionId};
use reqwest::header::{CONTENT_ENCODING, CONTENT_TYPE}; use reqwest::header::{CONTENT_ENCODING, CONTENT_TYPE};
use rusty_s3::{ use rusty_s3::{
actions::{GetObject, PutObject}, actions::{GetObject, PutObject},

View file

@ -4,7 +4,6 @@ use clap::Args;
use futures::future::try_join_all; use futures::future::try_join_all;
use pesde::{ use pesde::{
source::{ source::{
refs::PackageRefs,
specifiers::DependencySpecifiers, specifiers::DependencySpecifiers,
traits::{PackageRef, PackageSource, RefreshOptions, ResolveOptions}, traits::{PackageRef, PackageSource, RefreshOptions, ResolveOptions},
}, },
@ -39,89 +38,77 @@ impl OutdatedCommand {
let refreshed_sources = RefreshedSources::new(); let refreshed_sources = RefreshedSources::new();
if try_join_all( if try_join_all(graph.into_iter().map(|(current_id, node)| {
graph let project = project.clone();
.into_iter() let refreshed_sources = refreshed_sources.clone();
.flat_map(|(_, versions)| versions.into_iter()) async move {
.map(|(current_version_id, node)| { let Some((alias, mut specifier, _)) = node.node.direct else {
let project = project.clone(); return Ok::<bool, anyhow::Error>(true);
let refreshed_sources = refreshed_sources.clone(); };
async move {
let Some((alias, mut specifier, _)) = node.node.direct else {
return Ok::<bool, anyhow::Error>(true);
};
if matches!( if matches!(
specifier, specifier,
DependencySpecifiers::Git(_) DependencySpecifiers::Git(_)
| DependencySpecifiers::Workspace(_) | DependencySpecifiers::Workspace(_)
| DependencySpecifiers::Path(_) | DependencySpecifiers::Path(_)
) { ) {
return Ok(true); return Ok(true);
}
let source = node.node.pkg_ref.source();
refreshed_sources
.refresh(
&source,
&RefreshOptions {
project: project.clone(),
},
)
.await?;
if !self.strict {
match &mut specifier {
DependencySpecifiers::Pesde(spec) => {
spec.version = VersionReq::STAR;
} }
#[cfg(feature = "wally-compat")]
let source = node.node.pkg_ref.source(); DependencySpecifiers::Wally(spec) => {
refreshed_sources spec.version = VersionReq::STAR;
.refresh(
&source,
&RefreshOptions {
project: project.clone(),
},
)
.await?;
if !self.strict {
match &mut specifier {
DependencySpecifiers::Pesde(spec) => {
spec.version = VersionReq::STAR;
}
#[cfg(feature = "wally-compat")]
DependencySpecifiers::Wally(spec) => {
spec.version = VersionReq::STAR;
}
DependencySpecifiers::Git(_) => {}
DependencySpecifiers::Workspace(_) => {}
DependencySpecifiers::Path(_) => {}
};
} }
DependencySpecifiers::Git(_) => {}
DependencySpecifiers::Workspace(_) => {}
DependencySpecifiers::Path(_) => {}
};
}
let version_id = source let version_id = source
.resolve( .resolve(
&specifier, &specifier,
&ResolveOptions { &ResolveOptions {
project: project.clone(), project: project.clone(),
target: manifest_target_kind, target: manifest_target_kind,
refreshed_sources: refreshed_sources.clone(), refreshed_sources: refreshed_sources.clone(),
}, },
) )
.await .await
.context("failed to resolve package versions")? .context("failed to resolve package versions")?
.1 .1
.pop_last() .pop_last()
.map(|(v_id, _)| v_id) .map(|(v_id, _)| v_id)
.with_context(|| format!("no versions of {specifier} found"))?; .with_context(|| format!("no versions of {specifier} found"))?;
if version_id != current_version_id { if version_id != *current_id.version_id() {
println!( println!(
"{} {} ({alias}) {} -> {}", "{} ({alias}) {} -> {version_id}",
match node.node.pkg_ref { current_id.name(),
PackageRefs::Pesde(pkg_ref) => pkg_ref.name.to_string(), current_id.version_id(),
#[cfg(feature = "wally-compat")] );
PackageRefs::Wally(pkg_ref) => pkg_ref.name.to_string(),
_ => unreachable!(),
},
current_version_id.target(),
current_version_id.version(),
version_id.version()
);
return Ok(false); return Ok(false);
} }
Ok(true) Ok(true)
} }
}), }))
)
.await? .await?
.into_iter() .into_iter()
.all(|b| b) .all(|b| b)

View file

@ -29,12 +29,9 @@ impl PatchCommand {
anyhow::bail!("outdated lockfile, please run the install command first") anyhow::bail!("outdated lockfile, please run the install command first")
}; };
let (name, version_id) = self.package.get(&graph)?; let id = self.package.get(&graph)?;
let node = graph let node = graph.get(&id).context("package not found in graph")?;
.get(&name)
.and_then(|versions| versions.get(&version_id))
.context("package not found in graph")?;
if matches!(node.node.pkg_ref, PackageRefs::Workspace(_)) { if matches!(node.node.pkg_ref, PackageRefs::Workspace(_)) {
anyhow::bail!("cannot patch a workspace package") anyhow::bail!("cannot patch a workspace package")
@ -45,8 +42,8 @@ impl PatchCommand {
let directory = project let directory = project
.data_dir() .data_dir()
.join("patches") .join("patches")
.join(name.escaped()) .join(id.name().escaped())
.join(version_id.escaped()) .join(id.version_id().escaped())
.join(chrono::Utc::now().timestamp().to_string()); .join(chrono::Utc::now().timestamp().to_string());
fs::create_dir_all(&directory).await?; fs::create_dir_all(&directory).await?;

View file

@ -2,7 +2,12 @@ use crate::cli::up_to_date_lockfile;
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use fs_err::tokio as fs; use fs_err::tokio as fs;
use pesde::{names::PackageNames, patches::create_patch, source::version_id::VersionId, Project}; use pesde::{
names::PackageNames,
patches::create_patch,
source::ids::{PackageId, VersionId},
Project,
};
use std::{path::PathBuf, str::FromStr}; use std::{path::PathBuf, str::FromStr};
#[derive(Debug, Args)] #[derive(Debug, Args)]
@ -20,7 +25,7 @@ impl PatchCommitCommand {
anyhow::bail!("outdated lockfile, please run the install command first") anyhow::bail!("outdated lockfile, please run the install command first")
}; };
let (name, version_id) = ( let id = PackageId::new(
PackageNames::from_escaped( PackageNames::from_escaped(
self.directory self.directory
.parent() .parent()
@ -43,10 +48,7 @@ impl PatchCommitCommand {
)?, )?,
); );
graph graph.get(&id).context("package not found in graph")?;
.get(&name)
.and_then(|versions| versions.get(&version_id))
.context("package not found in graph")?;
let mut manifest = toml_edit::DocumentMut::from_str( let mut manifest = toml_edit::DocumentMut::from_str(
&project &project
@ -66,7 +68,11 @@ impl PatchCommitCommand {
.await .await
.context("failed to create patches directory")?; .context("failed to create patches directory")?;
let patch_file_name = format!("{}-{}.patch", name.escaped(), version_id.escaped()); let patch_file_name = format!(
"{}-{}.patch",
id.name().escaped(),
id.version_id().escaped()
);
let patch_file = patches_dir.join(&patch_file_name); let patch_file = patches_dir.join(&patch_file_name);
if patch_file.exists() { if patch_file.exists() {
@ -78,7 +84,7 @@ impl PatchCommitCommand {
.context("failed to write patch file")?; .context("failed to write patch file")?;
manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new())) manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
[&name.to_string()][&version_id.to_string()] = [&id.name().to_string()][&id.version_id().to_string()] =
toml_edit::value(format!("patches/{patch_file_name}")); toml_edit::value(format!("patches/{patch_file_name}"));
project project

View file

@ -114,8 +114,7 @@ impl PublishCommand {
if lockfile if lockfile
.graph .graph
.values() .values()
.flatten() .filter_map(|node| node.node.direct.as_ref().map(|_| node))
.filter_map(|(_, node)| node.node.direct.as_ref().map(|_| node))
.any(|node| { .any(|node| {
node.target.build_files().is_none() node.target.build_files().is_none()
&& !matches!(node.node.resolved_ty, DependencyType::Dev) && !matches!(node.node.resolved_ty, DependencyType::Dev)

View file

@ -73,35 +73,39 @@ impl RunCommand {
let pkg_name = PackageNames::Pesde(pkg_name); let pkg_name = PackageNames::Pesde(pkg_name);
for (version_id, node) in graph.get(&pkg_name).context("package not found in graph")? { let mut versions = graph
if node.node.direct.is_none() { .into_iter()
continue; .filter(|(id, node)| *id.name() == pkg_name && node.node.direct.is_some())
} .collect::<Vec<_>>();
let Some(bin_path) = node.target.bin_path() else { let (id, node) = match versions.len() {
anyhow::bail!("package has no bin path"); 0 => anyhow::bail!("package not found"),
}; 1 => versions.pop().unwrap(),
_ => anyhow::bail!("multiple versions found. use the package's alias instead."),
};
let base_folder = project let Some(bin_path) = node.target.bin_path() else {
.deser_manifest() anyhow::bail!("package has no bin path");
.await? };
.target
.kind()
.packages_folder(version_id.target());
let container_folder = node.node.container_folder(
&project
.package_dir()
.join(base_folder)
.join(PACKAGES_CONTAINER_NAME),
&pkg_name,
version_id.version(),
);
let path = bin_path.to_path(&container_folder); let base_folder = project
.deser_manifest()
.await?
.target
.kind()
.packages_folder(id.version_id().target());
let container_folder = node.node.container_folder(
&project
.package_dir()
.join(base_folder)
.join(PACKAGES_CONTAINER_NAME),
&id,
);
run(&path, &path); let path = bin_path.to_path(&container_folder);
return Ok(());
} run(&path, &path);
return Ok(());
} }
if let Ok(manifest) = project.deser_manifest().await { if let Ok(manifest) = project.deser_manifest().await {

View file

@ -64,11 +64,10 @@ impl DownloadAndLinkHooks for InstallHooks {
async fn on_bins_downloaded( async fn on_bins_downloaded(
&self, &self,
downloaded_graph: &pesde::lockfile::DownloadedGraph, downloaded_graph: &DownloadedGraph,
) -> Result<(), Self::Error> { ) -> Result<(), Self::Error> {
let mut tasks = downloaded_graph let mut tasks = downloaded_graph
.values() .values()
.flat_map(|versions| versions.values())
.filter(|node| node.target.bin_path().is_some()) .filter(|node| node.target.bin_path().is_some())
.filter_map(|node| node.node.direct.as_ref()) .filter_map(|node| node.node.direct.as_ref())
.map(|(alias, _, _)| alias) .map(|(alias, _, _)| alias)
@ -242,15 +241,7 @@ pub async fn install(
lockfile lockfile
.graph .graph
.into_iter() .into_iter()
.map(|(name, versions)| { .map(|(id, node)| (id, node.node))
(
name,
versions
.into_iter()
.map(|(version, node)| (version, node.node))
.collect(),
)
})
.collect() .collect()
}); });
@ -345,42 +336,38 @@ pub fn print_package_diff(prefix: &str, old_graph: DependencyGraph, new_graph: D
let mut new_pkg_map = BTreeMap::new(); let mut new_pkg_map = BTreeMap::new();
let mut new_direct_pkg_map = BTreeMap::new(); let mut new_direct_pkg_map = BTreeMap::new();
for (name, versions) in &old_graph { for (id, node) in &old_graph {
for (version, node) in versions { old_pkg_map.insert(id, node);
old_pkg_map.insert((name.clone(), version), node); if node.direct.is_some() {
if node.direct.is_some() { old_direct_pkg_map.insert(id, node);
old_direct_pkg_map.insert((name.clone(), version), node);
}
} }
} }
for (name, versions) in &new_graph { for (id, node) in &new_graph {
for (version, node) in versions { new_pkg_map.insert(id, &node.node);
new_pkg_map.insert((name.clone(), version), &node.node); if node.node.direct.is_some() {
if node.node.direct.is_some() { new_direct_pkg_map.insert(id, &node.node);
new_direct_pkg_map.insert((name.clone(), version), &node.node);
}
} }
} }
let added_pkgs = new_pkg_map let added_pkgs = new_pkg_map
.iter() .iter()
.filter(|(key, _)| !old_pkg_map.contains_key(key)) .filter(|(key, _)| !old_pkg_map.contains_key(*key))
.map(|(key, &node)| (key, node)) .map(|(key, &node)| (key, node))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let removed_pkgs = old_pkg_map let removed_pkgs = old_pkg_map
.iter() .iter()
.filter(|(key, _)| !new_pkg_map.contains_key(key)) .filter(|(key, _)| !new_pkg_map.contains_key(*key))
.map(|(key, &node)| (key, node)) .map(|(key, &node)| (key, node))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let added_direct_pkgs = new_direct_pkg_map let added_direct_pkgs = new_direct_pkg_map
.iter() .iter()
.filter(|(key, _)| !old_direct_pkg_map.contains_key(key)) .filter(|(key, _)| !old_direct_pkg_map.contains_key(*key))
.map(|(key, &node)| (key, node)) .map(|(key, &node)| (key, node))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let removed_direct_pkgs = old_direct_pkg_map let removed_direct_pkgs = old_direct_pkg_map
.iter() .iter()
.filter(|(key, _)| !new_direct_pkg_map.contains_key(key)) .filter(|(key, _)| !new_direct_pkg_map.contains_key(*key))
.map(|(key, &node)| (key, node)) .map(|(key, &node)| (key, node))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
@ -448,12 +435,12 @@ pub fn print_package_diff(prefix: &str, old_graph: DependencyGraph, new_graph: D
}; };
println!("{}", format!("{ty_name}:").yellow().bold()); println!("{}", format!("{ty_name}:").yellow().bold());
for ((name, version), added) in set { for (id, added) in set {
println!( println!(
"{} {} {}", "{} {} {}",
if added { "+".green() } else { "-".red() }, if added { "+".green() } else { "-".red() },
name, id.name(),
version.to_string().dimmed() id.version_id().to_string().dimmed()
); );
} }
} }

View file

@ -11,7 +11,8 @@ use pesde::{
}, },
names::{PackageName, PackageNames}, names::{PackageName, PackageNames},
source::{ source::{
specifiers::DependencySpecifiers, version_id::VersionId, ids::{PackageId, VersionId},
specifiers::DependencySpecifiers,
workspace::specifier::VersionTypeOrReq, workspace::specifier::VersionTypeOrReq,
}, },
Project, Project,
@ -116,7 +117,6 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
let specs = lockfile let specs = lockfile
.graph .graph
.iter() .iter()
.flat_map(|(_, versions)| versions)
.filter_map(|(_, node)| { .filter_map(|(_, node)| {
node.node node.node
.direct .direct
@ -166,32 +166,31 @@ impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<a
impl VersionedPackageName { impl VersionedPackageName {
#[cfg(feature = "patches")] #[cfg(feature = "patches")]
fn get( fn get(self, graph: &pesde::lockfile::DownloadedGraph) -> anyhow::Result<PackageId> {
self,
graph: &pesde::lockfile::DownloadedGraph,
) -> anyhow::Result<(PackageNames, VersionId)> {
let version_id = match self.1 { let version_id = match self.1 {
Some(version) => version, Some(version) => version,
None => { None => {
let versions = graph.get(&self.0).context("package not found in graph")?; let versions = graph
if versions.len() == 1 { .keys()
let version = versions.keys().next().unwrap().clone(); .filter(|id| *id.name() == self.0)
tracing::debug!("only one version found, using {version}"); .collect::<Vec<_>>();
version
} else { match versions.len() {
anyhow::bail!( 0 => anyhow::bail!("package not found"),
1 => versions[0].version_id().clone(),
_ => anyhow::bail!(
"multiple versions found, please specify one of: {}", "multiple versions found, please specify one of: {}",
versions versions
.keys() .iter()
.map(|v| v.to_string()) .map(|v| v.to_string())
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join(", ") .join(", ")
); ),
} }
} }
}; };
Ok((self.0, version_id)) Ok(PackageId::new(self.0, version_id))
} }
} }

View file

@ -1,11 +1,10 @@
use crate::{ use crate::{
lockfile::{DependencyGraph, DownloadedDependencyGraphNode}, lockfile::{DependencyGraph, DownloadedDependencyGraphNode},
manifest::DependencyType, manifest::DependencyType,
names::PackageNames,
reporters::{DownloadProgressReporter, DownloadsReporter}, reporters::{DownloadProgressReporter, DownloadsReporter},
source::{ source::{
ids::PackageId,
traits::{DownloadOptions, PackageRef, PackageSource, RefreshOptions}, traits::{DownloadOptions, PackageRef, PackageSource, RefreshOptions},
version_id::VersionId,
}, },
Project, RefreshedSources, PACKAGES_CONTAINER_NAME, Project, RefreshedSources, PACKAGES_CONTAINER_NAME,
}; };
@ -112,10 +111,7 @@ impl Project {
options: DownloadGraphOptions<Reporter>, options: DownloadGraphOptions<Reporter>,
) -> Result< ) -> Result<
impl Stream< impl Stream<
Item = Result< Item = Result<(DownloadedDependencyGraphNode, PackageId), errors::DownloadGraphError>,
(DownloadedDependencyGraphNode, PackageNames, VersionId),
errors::DownloadGraphError,
>,
>, >,
errors::DownloadGraphError, errors::DownloadGraphError,
> >
@ -139,19 +135,10 @@ impl Project {
let mut tasks = graph let mut tasks = graph
.iter() .iter()
.flat_map(|(name, versions)| {
versions
.iter()
.map(|(version_id, node)| (name.clone(), version_id.clone(), node.clone()))
})
// we need to download pesde packages first, since scripts (for target finding for example) can depend on them // we need to download pesde packages first, since scripts (for target finding for example) can depend on them
.filter(|(_, _, node)| node.pkg_ref.like_wally() == wally) .filter(|(_, node)| node.pkg_ref.like_wally() == wally)
.map(|(name, version_id, node)| { .map(|(package_id, node)| {
let span = tracing::info_span!( let span = tracing::info_span!("download", package_id = package_id.to_string(),);
"download",
name = name.to_string(),
version_id = version_id.to_string()
);
let project = self.clone(); let project = self.clone();
let reqwest = reqwest.clone(); let reqwest = reqwest.clone();
@ -159,12 +146,13 @@ impl Project {
let refreshed_sources = refreshed_sources.clone(); let refreshed_sources = refreshed_sources.clone();
let package_dir = project.package_dir().to_path_buf(); let package_dir = project.package_dir().to_path_buf();
let semaphore = semaphore.clone(); let semaphore = semaphore.clone();
let package_id = package_id.clone();
let node = node.clone();
async move { async move {
let display_name = format!("{name}@{version_id}");
let progress_reporter = reporter let progress_reporter = reporter
.as_deref() .as_deref()
.map(|reporter| reporter.report_download(&display_name)); .map(|reporter| reporter.report_download(&package_id.to_string()));
let _permit = semaphore.acquire().await; let _permit = semaphore.acquire().await;
@ -184,10 +172,12 @@ impl Project {
let container_folder = node.container_folder( let container_folder = node.container_folder(
&package_dir &package_dir
.join(manifest_target_kind.packages_folder(version_id.target())) .join(
manifest_target_kind
.packages_folder(package_id.version_id().target()),
)
.join(PACKAGES_CONTAINER_NAME), .join(PACKAGES_CONTAINER_NAME),
&name, &package_id,
version_id.version(),
); );
fs::create_dir_all(&container_folder).await?; fs::create_dir_all(&container_folder).await?;
@ -234,7 +224,7 @@ impl Project {
} }
let downloaded_node = DownloadedDependencyGraphNode { node, target }; let downloaded_node = DownloadedDependencyGraphNode { node, target };
Ok((downloaded_node, name, version_id)) Ok((downloaded_node, package_id))
} }
.instrument(span) .instrument(span)
}) })

View file

@ -15,24 +15,18 @@ use std::{
use tracing::{instrument, Instrument}; use tracing::{instrument, Instrument};
/// Filters a graph to only include production dependencies, if `prod` is `true` /// Filters a graph to only include production dependencies, if `prod` is `true`
pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph { pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> Arc<DownloadedGraph> {
if !prod { if !prod {
return graph.clone(); return Arc::new(graph.clone());
} }
graph Arc::new(
.iter() graph
.map(|(name, versions)| { .iter()
( .filter(|(_, node)| node.node.resolved_ty != DependencyType::Dev)
name.clone(), .map(|(id, node)| (id.clone(), node.clone()))
versions .collect(),
.iter() )
.filter(|(_, node)| node.node.resolved_ty != DependencyType::Dev)
.map(|(v_id, node)| (v_id.clone(), node.clone()))
.collect(),
)
})
.collect()
} }
/// Receiver for dependencies downloaded and linked /// Receiver for dependencies downloaded and linked
@ -206,11 +200,8 @@ impl Project {
self.download_graph(&graph, download_graph_options.clone()) self.download_graph(&graph, download_graph_options.clone())
.instrument(tracing::debug_span!("download (pesde)")) .instrument(tracing::debug_span!("download (pesde)"))
.await? .await?
.try_for_each(|(downloaded_node, name, version_id)| { .try_for_each(|(downloaded_node, id)| {
downloaded_graph downloaded_graph.insert(id, downloaded_node);
.entry(name)
.or_default()
.insert(version_id, downloaded_node);
future::ready(Ok(())) future::ready(Ok(()))
}) })
@ -218,7 +209,7 @@ impl Project {
// step 2. link pesde dependencies. do so without types // step 2. link pesde dependencies. do so without types
if write { if write {
self.link_dependencies(&filter_graph(&downloaded_graph, prod), false) self.link_dependencies(filter_graph(&downloaded_graph, prod), false)
.instrument(tracing::debug_span!("link (pesde)")) .instrument(tracing::debug_span!("link (pesde)"))
.await?; .await?;
} }
@ -239,11 +230,8 @@ impl Project {
self.download_graph(&graph, download_graph_options.clone().wally(true)) self.download_graph(&graph, download_graph_options.clone().wally(true))
.instrument(tracing::debug_span!("download (wally)")) .instrument(tracing::debug_span!("download (wally)"))
.await? .await?
.try_for_each(|(downloaded_node, name, version_id)| { .try_for_each(|(downloaded_node, id)| {
downloaded_graph downloaded_graph.insert(id, downloaded_node);
.entry(name)
.or_default()
.insert(version_id, downloaded_node);
future::ready(Ok(())) future::ready(Ok(()))
}) })
@ -251,7 +239,7 @@ impl Project {
// step 4. link ALL dependencies. do so with types // step 4. link ALL dependencies. do so with types
if write { if write {
self.link_dependencies(&filter_graph(&downloaded_graph, prod), true) self.link_dependencies(filter_graph(&downloaded_graph, prod), true)
.instrument(tracing::debug_span!("link (all)")) .instrument(tracing::debug_span!("link (all)"))
.await?; .await?;
} }

View file

@ -211,7 +211,18 @@ impl Project {
#[instrument(skip(self), ret(level = "trace"), level = "debug")] #[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> { pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
let string = fs::read_to_string(self.package_dir().join(LOCKFILE_FILE_NAME)).await?; let string = fs::read_to_string(self.package_dir().join(LOCKFILE_FILE_NAME)).await?;
Ok(toml::from_str(&string)?) Ok(match toml::from_str(&string) {
Ok(lockfile) => lockfile,
Err(e) => {
#[allow(deprecated)]
let Ok(old_lockfile) = toml::from_str::<lockfile::old::LockfileOld>(&string) else {
return Err(errors::LockfileReadError::Serde(e));
};
#[allow(deprecated)]
old_lockfile.to_new()
}
})
} }
/// Write the lockfile /// Write the lockfile

View file

@ -2,24 +2,22 @@ use crate::{
linking::generator::get_file_types, linking::generator::get_file_types,
lockfile::{DownloadedDependencyGraphNode, DownloadedGraph}, lockfile::{DownloadedDependencyGraphNode, DownloadedGraph},
manifest::Manifest, manifest::Manifest,
names::PackageNames,
scripts::{execute_script, ExecuteScriptHooks, ScriptName}, scripts::{execute_script, ExecuteScriptHooks, ScriptName},
source::{ source::{
fs::{cas_path, store_in_cas}, fs::{cas_path, store_in_cas},
ids::PackageId,
traits::PackageRef, traits::PackageRef,
version_id::VersionId,
}, },
Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER, Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
}; };
use fs_err::tokio as fs; use fs_err::tokio as fs;
use futures::future::try_join_all;
use std::{ use std::{
collections::HashMap, collections::HashMap,
ffi::OsStr, ffi::OsStr,
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
}; };
use tokio::task::spawn_blocking; use tokio::task::{spawn_blocking, JoinSet};
use tracing::{instrument, Instrument}; use tracing::{instrument, Instrument};
/// Generates linking modules for a project /// Generates linking modules for a project
@ -59,7 +57,7 @@ impl Project {
#[instrument(skip(self, graph), level = "debug")] #[instrument(skip(self, graph), level = "debug")]
pub async fn link_dependencies( pub async fn link_dependencies(
&self, &self,
graph: &DownloadedGraph, graph: Arc<DownloadedGraph>,
with_types: bool, with_types: bool,
) -> Result<(), errors::LinkingError> { ) -> Result<(), errors::LinkingError> {
let manifest = self.deser_manifest().await?; let manifest = self.deser_manifest().await?;
@ -68,7 +66,7 @@ impl Project {
// step 1. link all non-wally packages (and their dependencies) temporarily without types // step 1. link all non-wally packages (and their dependencies) temporarily without types
// we do this separately to allow the required tools for the scripts to be installed // we do this separately to allow the required tools for the scripts to be installed
self.link(graph, &manifest, &Arc::new(Default::default()), false) self.link(&graph, &manifest, &Arc::new(Default::default()), false)
.await?; .await?;
if !with_types { if !with_types {
@ -76,83 +74,86 @@ impl Project {
} }
// step 2. extract the types from libraries, prepare Roblox packages for syncing // step 2. extract the types from libraries, prepare Roblox packages for syncing
let package_types = try_join_all(graph.iter().map(|(name, versions)| async move { let mut tasks = graph
Ok::<_, errors::LinkingError>(( .iter()
name, .map(|(package_id, node)| {
try_join_all(versions.iter().map(|(version_id, node)| { let span =
async move { tracing::info_span!("extract types", package_id = package_id.to_string(),);
let Some(lib_file) = node.target.lib_path() else {
return Ok((version_id, vec![]));
};
let container_folder = node.node.container_folder( let package_id = package_id.clone();
&self let node = node.clone();
.package_dir() let project = self.clone();
.join(manifest_target_kind.packages_folder(version_id.target()))
.join(PACKAGES_CONTAINER_NAME),
name,
version_id.version(),
);
let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND { async move {
let lib_file = lib_file.to_path(&container_folder); let Some(lib_file) = node.target.lib_path() else {
return Ok((package_id, vec![]));
};
let contents = match fs::read_to_string(&lib_file).await { let container_folder = node.node.container_folder(
Ok(contents) => contents, &project
Err(e) if e.kind() == std::io::ErrorKind::NotFound => { .package_dir()
return Err(errors::LinkingError::LibFileNotFound( .join(
lib_file.display().to_string(), manifest_target_kind
)); .packages_folder(package_id.version_id().target()),
}
Err(e) => return Err(e.into()),
};
let types = spawn_blocking(move || get_file_types(&contents))
.await
.unwrap();
tracing::debug!("contains {} exported types", types.len());
types
} else {
vec![]
};
if let Some(build_files) = Some(&node.target)
.filter(|_| !node.node.pkg_ref.like_wally())
.and_then(|t| t.build_files())
{
execute_script(
ScriptName::RobloxSyncConfigGenerator,
self,
LinkingExecuteScriptHooks,
std::iter::once(container_folder.as_os_str())
.chain(build_files.iter().map(OsStr::new)),
false,
) )
.await .join(PACKAGES_CONTAINER_NAME),
.map_err(errors::LinkingError::ExecuteScript)?; &package_id,
} );
Ok((version_id, types)) let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
let lib_file = lib_file.to_path(&container_folder);
let contents = match fs::read_to_string(&lib_file).await {
Ok(contents) => contents,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
return Err(errors::LinkingError::LibFileNotFound(
lib_file.display().to_string(),
));
}
Err(e) => return Err(e.into()),
};
let types = spawn_blocking(move || get_file_types(&contents))
.await
.unwrap();
tracing::debug!("contains {} exported types", types.len());
types
} else {
vec![]
};
if let Some(build_files) = Some(&node.target)
.filter(|_| !node.node.pkg_ref.like_wally())
.and_then(|t| t.build_files())
{
execute_script(
ScriptName::RobloxSyncConfigGenerator,
&project,
LinkingExecuteScriptHooks,
std::iter::once(container_folder.as_os_str())
.chain(build_files.iter().map(OsStr::new)),
false,
)
.await
.map_err(errors::LinkingError::ExecuteScript)?;
} }
.instrument(tracing::info_span!(
"extract types", Ok((package_id, types))
name = name.to_string(), }
version_id = version_id.to_string() .instrument(span)
)) })
})) .collect::<JoinSet<_>>();
.await?
.into_iter() let mut package_types = HashMap::new();
.collect::<HashMap<_, _>>(), while let Some(task) = tasks.join_next().await {
)) let (version_id, types) = task.unwrap()?;
})) package_types.insert(version_id, types);
.await? }
.into_iter()
.collect::<HashMap<_, _>>();
// step 3. link all packages (and their dependencies), this time with types // step 3. link all packages (and their dependencies), this time with types
self.link(graph, &manifest, &Arc::new(package_types), true) self.link(&graph, &manifest, &Arc::new(package_types), true)
.await .await
} }
@ -164,10 +165,9 @@ impl Project {
root_container_folder: &Path, root_container_folder: &Path,
relative_container_folder: &Path, relative_container_folder: &Path,
node: &DownloadedDependencyGraphNode, node: &DownloadedDependencyGraphNode,
name: &PackageNames, package_id: &PackageId,
version_id: &VersionId,
alias: &str, alias: &str,
package_types: &HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>, package_types: &HashMap<PackageId, Vec<String>>,
manifest: &Manifest, manifest: &Manifest,
) -> Result<(), errors::LinkingError> { ) -> Result<(), errors::LinkingError> {
static NO_TYPES: Vec<String> = Vec::new(); static NO_TYPES: Vec<String> = Vec::new();
@ -184,10 +184,7 @@ impl Project {
relative_container_folder, relative_container_folder,
manifest, manifest,
)?, )?,
package_types package_types.get(package_id).unwrap_or(&NO_TYPES),
.get(name)
.and_then(|v| v.get(version_id))
.unwrap_or(&NO_TYPES),
); );
write_cas( write_cas(
@ -239,68 +236,65 @@ impl Project {
async fn link( async fn link(
&self, &self,
graph: &DownloadedGraph, graph: &Arc<DownloadedGraph>,
manifest: &Arc<Manifest>, manifest: &Arc<Manifest>,
package_types: &Arc<HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>>, package_types: &Arc<HashMap<PackageId, Vec<String>>>,
is_complete: bool, is_complete: bool,
) -> Result<(), errors::LinkingError> { ) -> Result<(), errors::LinkingError> {
try_join_all(graph.iter().flat_map(|(name, versions)| { let mut tasks = graph
versions.iter().map(|(version_id, node)| { .iter()
let name = name.clone(); .map(|(package_id, node)| {
let graph = graph.clone();
let manifest = manifest.clone(); let manifest = manifest.clone();
let package_types = package_types.clone(); let package_types = package_types.clone();
let span = tracing::info_span!( let span = tracing::info_span!("link", package_id = package_id.to_string());
"link", let package_id = package_id.clone();
name = name.to_string(), let node = node.clone();
version_id = version_id.to_string() let project = self.clone();
);
async move { async move {
let (node_container_folder, node_packages_folder) = { let (node_container_folder, node_packages_folder) = {
let base_folder = create_and_canonicalize( let base_folder = create_and_canonicalize(
self.package_dir() project.package_dir().join(
.join(manifest.target.kind().packages_folder(version_id.target())), manifest
.target
.kind()
.packages_folder(package_id.version_id().target()),
),
) )
.await?; .await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME); let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
let container_folder = node.node.container_folder( let container_folder = node
&packages_container_folder, .node
&name, .container_folder(&packages_container_folder, &package_id);
version_id.version(),
);
if let Some((alias, _, _)) = &node.node.direct { if let Some((alias, _, _)) = &node.node.direct {
self.link_files( project
&base_folder, .link_files(
&container_folder, &base_folder,
&base_folder, &container_folder,
container_folder.strip_prefix(&base_folder).unwrap(), &base_folder,
node, container_folder.strip_prefix(&base_folder).unwrap(),
&name, &node,
version_id, &package_id,
alias, alias,
&package_types, &package_types,
&manifest, &manifest,
) )
.await?; .await?;
} }
(container_folder, base_folder) (container_folder, base_folder)
}; };
for (dependency_name, (dependency_version_id, dependency_alias)) in for (dependency_id, dependency_alias) in &node.node.dependencies {
&node.node.dependencies let Some(dependency_node) = graph.get(dependency_id) else {
{
let Some(dependency_node) = graph
.get(dependency_name)
.and_then(|v| v.get(dependency_version_id))
else {
if is_complete { if is_complete {
return Err(errors::LinkingError::DependencyNotFound( return Err(errors::LinkingError::DependencyNotFound(
format!("{dependency_name}@{dependency_version_id}"), dependency_id.to_string(),
format!("{name}@{version_id}"), package_id.to_string(),
)); ));
} }
@ -308,51 +302,54 @@ impl Project {
}; };
let base_folder = create_and_canonicalize( let base_folder = create_and_canonicalize(
self.package_dir().join( project.package_dir().join(
version_id package_id
.version_id()
.target() .target()
.packages_folder(dependency_version_id.target()), .packages_folder(dependency_id.version_id().target()),
), ),
) )
.await?; .await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME); let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
let container_folder = dependency_node.node.container_folder( let container_folder = dependency_node
&packages_container_folder, .node
dependency_name, .container_folder(&packages_container_folder, dependency_id);
dependency_version_id.version(),
);
let linker_folder = create_and_canonicalize( let linker_folder = create_and_canonicalize(node_container_folder.join(
node_container_folder.join( node.node.base_folder(
node.node package_id.version_id(),
.base_folder(version_id, dependency_node.target.kind()), dependency_node.target.kind(),
), ),
) ))
.await?; .await?;
self.link_files( project
&linker_folder, .link_files(
&container_folder, &linker_folder,
&node_packages_folder, &container_folder,
container_folder.strip_prefix(&base_folder).unwrap(), &node_packages_folder,
dependency_node, container_folder.strip_prefix(&base_folder).unwrap(),
dependency_name, dependency_node,
dependency_version_id, dependency_id,
dependency_alias, dependency_alias,
&package_types, &package_types,
&manifest, &manifest,
) )
.await?; .await?;
} }
Ok(()) Ok(())
} }
.instrument(span) .instrument(span)
}) })
})) .collect::<JoinSet<_>>();
.await
.map(|_| ()) while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
Ok(())
} }
} }

View file

@ -1,13 +1,16 @@
#![allow(deprecated)]
use crate::{ use crate::{
manifest::{ manifest::{
overrides::OverrideKey, overrides::OverrideKey,
target::{Target, TargetKind}, target::{Target, TargetKind},
DependencyType, DependencyType,
}, },
names::{PackageName, PackageNames}, names::PackageName,
source::{ source::{
refs::PackageRefs, specifiers::DependencySpecifiers, traits::PackageRef, ids::{PackageId, VersionId},
version_id::VersionId, refs::PackageRefs,
specifiers::DependencySpecifiers,
traits::PackageRef,
}, },
}; };
use relative_path::RelativePathBuf; use relative_path::RelativePathBuf;
@ -19,7 +22,7 @@ use std::{
}; };
/// A graph of dependencies /// A graph of dependencies
pub type Graph<Node> = BTreeMap<PackageNames, BTreeMap<VersionId, Node>>; pub type Graph<Node> = BTreeMap<PackageId, Node>;
/// A dependency graph node /// A dependency graph node
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
@ -29,7 +32,7 @@ pub struct DependencyGraphNode {
pub direct: Option<(String, DependencySpecifiers, DependencyType)>, pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
/// The dependencies of the package /// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")] #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<PackageNames, (VersionId, String)>, pub dependencies: BTreeMap<PackageId, String>,
/// The resolved (transformed, for example Peer -> Standard) type of the dependency /// The resolved (transformed, for example Peer -> Standard) type of the dependency
pub resolved_ty: DependencyType, pub resolved_ty: DependencyType,
/// Whether the resolved type should be Peer if this isn't depended on /// Whether the resolved type should be Peer if this isn't depended on
@ -49,18 +52,15 @@ impl DependencyGraphNode {
} }
/// Returns the folder to store the contents of the package in /// Returns the folder to store the contents of the package in
pub fn container_folder<P: AsRef<Path>>( pub fn container_folder<P: AsRef<Path>>(&self, path: &P, package_id: &PackageId) -> PathBuf {
&self, let (name, version) = package_id.parts();
path: &P,
name: &PackageNames,
version: &Version,
) -> PathBuf {
if self.pkg_ref.like_wally() { if self.pkg_ref.like_wally() {
return path return path
.as_ref() .as_ref()
.join(format!( .join(format!(
"{}_{}@{}", "{}_{}@{}",
name.as_str().0, package_id.name().as_str().0,
name.as_str().1, name.as_str().1,
version version
)) ))
@ -111,3 +111,118 @@ pub struct Lockfile {
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")] #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub graph: DownloadedGraph, pub graph: DownloadedGraph,
} }
/// Old lockfile stuff. Will be removed in a future version.
#[deprecated(
note = "Intended to be used to migrate old lockfiles to the new format. Will be removed in a future version."
)]
pub mod old {
use crate::{
manifest::{
overrides::OverrideKey,
target::{Target, TargetKind},
DependencyType,
},
names::{PackageName, PackageNames},
source::{
ids::{PackageId, VersionId},
refs::PackageRefs,
specifiers::DependencySpecifiers,
},
};
use relative_path::RelativePathBuf;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
/// An old dependency graph node
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DependencyGraphNodeOld {
/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<PackageNames, (VersionId, String)>,
/// The resolved (transformed, for example Peer -> Standard) type of the dependency
pub resolved_ty: DependencyType,
/// Whether the resolved type should be Peer if this isn't depended on
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
pub is_peer: bool,
/// The package reference
pub pkg_ref: PackageRefs,
}
/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DownloadedDependencyGraphNodeOld {
/// The target of the package
pub target: Target,
/// The node
#[serde(flatten)]
pub node: DependencyGraphNodeOld,
}
/// An old version of a lockfile
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct LockfileOld {
/// The name of the package
pub name: PackageName,
/// The version of the package
pub version: Version,
/// The target of the package
pub target: TargetKind,
/// The overrides of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub overrides: BTreeMap<OverrideKey, DependencySpecifiers>,
/// The workspace members
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub workspace: BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>,
/// The graph of dependencies
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub graph: BTreeMap<PackageNames, BTreeMap<VersionId, DownloadedDependencyGraphNodeOld>>,
}
impl LockfileOld {
/// Converts this lockfile to a new lockfile
pub fn to_new(self) -> super::Lockfile {
super::Lockfile {
name: self.name,
version: self.version,
target: self.target,
overrides: self.overrides,
workspace: self.workspace,
graph: self
.graph
.into_iter()
.flat_map(|(name, versions)| {
versions.into_iter().map(move |(version, node)| {
(
PackageId(name.clone(), version),
super::DownloadedDependencyGraphNode {
target: node.target,
node: super::DependencyGraphNode {
direct: node.node.direct,
dependencies: node
.node
.dependencies
.into_iter()
.map(|(name, (version, alias))| {
(PackageId(name, version), alias)
})
.collect(),
resolved_ty: node.node.resolved_ty,
is_peer: node.node.is_peer,
pkg_ref: node.node.pkg_ref,
},
},
)
})
})
.collect(),
}
}
}
}

View file

@ -78,12 +78,12 @@ pub struct Manifest {
#[cfg_attr( #[cfg_attr(
feature = "schema", feature = "schema",
schemars( schemars(
with = "BTreeMap<crate::names::PackageNames, BTreeMap<crate::source::version_id::VersionId, std::path::PathBuf>>" with = "BTreeMap<crate::names::PackageNames, BTreeMap<crate::source::ids::VersionId, std::path::PathBuf>>"
) )
)] )]
pub patches: BTreeMap< pub patches: BTreeMap<
crate::names::PackageNames, crate::names::PackageNames,
BTreeMap<crate::source::version_id::VersionId, RelativePathBuf>, BTreeMap<crate::source::ids::VersionId, RelativePathBuf>,
>, >,
#[serde(default, skip_serializing)] #[serde(default, skip_serializing)]
/// Which version of the pesde CLI this package uses /// Which version of the pesde CLI this package uses

View file

@ -1,6 +1,7 @@
use crate::{ use crate::{
lockfile::DownloadedGraph, lockfile::DownloadedGraph,
reporters::{PatchProgressReporter, PatchesReporter}, reporters::{PatchProgressReporter, PatchesReporter},
source::ids::PackageId,
Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME, Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
}; };
use fs_err::tokio as fs; use fs_err::tokio as fs;
@ -93,12 +94,10 @@ impl Project {
for (version_id, patch_path) in versions { for (version_id, patch_path) in versions {
let patch_path = patch_path.to_path(self.package_dir()); let patch_path = patch_path.to_path(self.package_dir());
let Some(node) = graph let package_id = PackageId::new(name.clone(), version_id);
.get(&name) let Some(node) = graph.get(&package_id) else {
.and_then(|versions| versions.get(&version_id))
else {
tracing::warn!( tracing::warn!(
"patch for {name}@{version_id} not applied because it is not in the graph" "patch for {package_id} not applied because it is not in the graph"
); );
continue; continue;
}; };
@ -106,25 +105,24 @@ impl Project {
let container_folder = node.node.container_folder( let container_folder = node.node.container_folder(
&self &self
.package_dir() .package_dir()
.join(manifest.target.kind().packages_folder(version_id.target())) .join(
manifest
.target
.kind()
.packages_folder(package_id.version_id().target()),
)
.join(PACKAGES_CONTAINER_NAME), .join(PACKAGES_CONTAINER_NAME),
&name, &package_id,
version_id.version(),
); );
let reporter = reporter.clone(); let reporter = reporter.clone();
let span = tracing::info_span!( let span = tracing::info_span!("apply patch", package_id = package_id.to_string(),);
"apply patch",
name = name.to_string(),
version_id = version_id.to_string()
);
let display_name = format!("{name}@{version_id}");
tasks.spawn( tasks.spawn(
async move { async move {
tracing::debug!("applying patch"); tracing::debug!("applying patch");
let progress_reporter = reporter.report_patch(&display_name); let progress_reporter = reporter.report_patch(&package_id.to_string());
let patch = fs::read(&patch_path) let patch = fs::read(&patch_path)
.await .await

View file

@ -1,12 +1,11 @@
use crate::{ use crate::{
lockfile::{DependencyGraph, DependencyGraphNode}, lockfile::{DependencyGraph, DependencyGraphNode},
manifest::{overrides::OverrideSpecifier, DependencyType}, manifest::{overrides::OverrideSpecifier, DependencyType},
names::PackageNames,
source::{ source::{
ids::PackageId,
pesde::PesdePackageSource, pesde::PesdePackageSource,
specifiers::DependencySpecifiers, specifiers::DependencySpecifiers,
traits::{PackageRef, PackageSource, RefreshOptions, ResolveOptions}, traits::{PackageRef, PackageSource, RefreshOptions, ResolveOptions},
version_id::VersionId,
PackageSources, PackageSources,
}, },
Project, RefreshedSources, DEFAULT_INDEX_NAME, Project, RefreshedSources, DEFAULT_INDEX_NAME,
@ -16,22 +15,17 @@ use tracing::{instrument, Instrument};
fn insert_node( fn insert_node(
graph: &mut DependencyGraph, graph: &mut DependencyGraph,
name: &PackageNames, package_id: &PackageId,
version: &VersionId,
mut node: DependencyGraphNode, mut node: DependencyGraphNode,
is_top_level: bool, is_top_level: bool,
) { ) {
if !is_top_level && node.direct.take().is_some() { if !is_top_level && node.direct.take().is_some() {
tracing::debug!( tracing::debug!(
"tried to insert {name}@{version} as direct dependency from a non top-level context", "tried to insert {package_id} as direct dependency from a non top-level context",
); );
} }
match graph match graph.entry(package_id.clone()) {
.entry(name.clone())
.or_default()
.entry(version.clone())
{
Entry::Vacant(entry) => { Entry::Vacant(entry) => {
entry.insert(node); entry.insert(node);
} }
@ -40,7 +34,7 @@ fn insert_node(
match (&current_node.direct, &node.direct) { match (&current_node.direct, &node.direct) {
(Some(_), Some(_)) => { (Some(_), Some(_)) => {
tracing::warn!("duplicate direct dependency for {name}@{version}"); tracing::warn!("duplicate direct dependency for {package_id}");
} }
(None, Some(_)) => { (None, Some(_)) => {
@ -85,83 +79,67 @@ impl Project {
let mut graph = DependencyGraph::default(); let mut graph = DependencyGraph::default();
if let Some(previous_graph) = previous_graph { if let Some(previous_graph) = previous_graph {
for (name, versions) in previous_graph { for (package_id, node) in previous_graph {
for (version, node) in versions { let Some((old_alias, specifier, source_ty)) = &node.direct else {
let Some((old_alias, specifier, source_ty)) = &node.direct else { // this is not a direct dependency, will be added if it's still being used later
// this is not a direct dependency, will be added if it's still being used later continue;
continue; };
};
if matches!(specifier, DependencySpecifiers::Workspace(_)) { if matches!(specifier, DependencySpecifiers::Workspace(_)) {
// workspace dependencies must always be resolved brand new // workspace dependencies must always be resolved brand new
continue; continue;
} }
let Some(alias) = all_specifiers.remove(&(specifier.clone(), *source_ty)) let Some(alias) = all_specifiers.remove(&(specifier.clone(), *source_ty)) else {
else { tracing::debug!(
tracing::debug!( "dependency {package_id} (old alias {old_alias}) from old dependency graph is no longer in the manifest",
"dependency {name}@{version} (old alias {old_alias}) from old dependency graph is no longer in the manifest",
); );
continue; continue;
}; };
let span = tracing::info_span!("resolve from old graph", alias); let span = tracing::info_span!("resolve from old graph", alias);
let _guard = span.enter(); let _guard = span.enter();
tracing::debug!("resolved {}@{} from old dependency graph", name, version); tracing::debug!("resolved {package_id} from old dependency graph");
insert_node( insert_node(
&mut graph, &mut graph,
name, package_id,
version, DependencyGraphNode {
DependencyGraphNode { direct: Some((alias.clone(), specifier.clone(), *source_ty)),
direct: Some((alias.clone(), specifier.clone(), *source_ty)), ..node.clone()
..node.clone() },
}, true,
true, );
);
let mut queue = node let mut queue = node
.dependencies .dependencies
.iter() .iter()
.map(|(name, (version, dep_alias))| { .map(|(id, dep_alias)| (id, vec![alias.to_string(), dep_alias.to_string()]))
( .collect::<VecDeque<_>>();
name,
version,
vec![alias.to_string(), dep_alias.to_string()],
)
})
.collect::<VecDeque<_>>();
while let Some((dep_name, dep_version, path)) = queue.pop_front() { while let Some((dep_id, path)) = queue.pop_front() {
let inner_span = let inner_span =
tracing::info_span!("resolve dependency", path = path.join(">")); tracing::info_span!("resolve dependency", path = path.join(">"));
let _inner_guard = inner_span.enter(); let _inner_guard = inner_span.enter();
if let Some(dep_node) = previous_graph if let Some(dep_node) = previous_graph.get(dep_id) {
.get(dep_name) tracing::debug!("resolved sub-dependency {dep_id}");
.and_then(|v| v.get(dep_version)) insert_node(&mut graph, dep_id, dep_node.clone(), false);
{
tracing::debug!("resolved sub-dependency {dep_name}@{dep_version}");
insert_node(&mut graph, dep_name, dep_version, dep_node.clone(), false);
dep_node dep_node
.dependencies .dependencies
.iter() .iter()
.map(|(name, (version, alias))| { .map(|(id, alias)| {
( (
name, id,
version, path.iter()
path.iter() .cloned()
.cloned() .chain(std::iter::once(alias.to_string()))
.chain(std::iter::once(alias.to_string())) .collect(),
.collect(), )
) })
}) .for_each(|dep| queue.push_back(dep));
.for_each(|dep| queue.push_back(dep)); } else {
} else { tracing::warn!("dependency {dep_id} not found in previous graph");
tracing::warn!(
"dependency {dep_name}@{dep_version} not found in previous graph"
);
}
} }
} }
} }
@ -173,7 +151,7 @@ impl Project {
( (
spec, spec,
ty, ty,
None::<(PackageNames, VersionId)>, None::<PackageId>,
vec![alias], vec![alias],
false, false,
manifest.target.kind(), manifest.target.kind(),
@ -260,17 +238,12 @@ impl Project {
.await .await
.map_err(|e| Box::new(e.into()))?; .map_err(|e| Box::new(e.into()))?;
let Some(target_version_id) = graph let Some(package_id) = graph
.get(&name) .keys()
.and_then(|versions| { .filter(|id| *id.name() == name && resolved.contains_key(id.version_id()))
versions .max()
.keys()
// only consider versions that are compatible with the specifier
.filter(|ver| resolved.contains_key(ver))
.max()
})
.or_else(|| resolved.last_key_value().map(|(ver, _)| ver))
.cloned() .cloned()
.or_else(|| resolved.last_key_value().map(|(ver, _)| PackageId::new(name, ver.clone())))
else { else {
return Err(Box::new(errors::DependencyGraphError::NoMatchingVersion( return Err(Box::new(errors::DependencyGraphError::NoMatchingVersion(
format!("{specifier} ({target})"), format!("{specifier} ({target})"),
@ -284,33 +257,22 @@ impl Project {
ty ty
}; };
if let Some((dependant_name, dependant_version_id)) = dependant { if let Some(dependant_id) = dependant {
graph graph
.get_mut(&dependant_name) .get_mut(&dependant_id)
.and_then(|versions| versions.get_mut(&dependant_version_id)) .expect("dependant package not found in graph")
.and_then(|node| { .dependencies
node.dependencies .insert(package_id.clone(), alias.clone());
.insert(name.clone(), (target_version_id.clone(), alias.clone()))
});
} }
let pkg_ref = &resolved[&target_version_id]; let pkg_ref = &resolved[package_id.version_id()];
if let Some(already_resolved) = graph if let Some(already_resolved) = graph.get_mut(&package_id) {
.get_mut(&name) tracing::debug!("{package_id} already resolved");
.and_then(|versions| versions.get_mut(&target_version_id))
{
tracing::debug!(
"{}@{} already resolved",
name,
target_version_id
);
if std::mem::discriminant(&already_resolved.pkg_ref) if std::mem::discriminant(&already_resolved.pkg_ref) != std::mem::discriminant(pkg_ref) {
!= std::mem::discriminant(pkg_ref)
{
tracing::warn!( tracing::warn!(
"resolved package {name}@{target_version_id} has a different source than previously resolved one, this may cause issues", "resolved package {package_id} has a different source than previously resolved one, this may cause issues",
); );
} }
@ -346,17 +308,12 @@ impl Project {
}; };
insert_node( insert_node(
&mut graph, &mut graph,
&name, &package_id,
&target_version_id,
node, node,
depth == 0, depth == 0,
); );
tracing::debug!( tracing::debug!("resolved {package_id} from new dependency graph");
"resolved {}@{} from new dependency graph",
name,
target_version_id
);
for (dependency_alias, (dependency_spec, dependency_ty)) in for (dependency_alias, (dependency_spec, dependency_ty)) in
pkg_ref.dependencies().clone() pkg_ref.dependencies().clone()
@ -399,13 +356,13 @@ impl Project {
None => dependency_spec, None => dependency_spec,
}, },
dependency_ty, dependency_ty,
Some((name.clone(), target_version_id.clone())), Some(package_id.clone()),
path.iter() path.iter()
.cloned() .cloned()
.chain(std::iter::once(dependency_alias)) .chain(std::iter::once(dependency_alias))
.collect(), .collect(),
overridden.is_some(), overridden.is_some(),
*target_version_id.target(), *package_id.version_id().target(),
)); ));
} }
@ -415,15 +372,13 @@ impl Project {
.await?; .await?;
} }
for (name, versions) in &mut graph { for (id, node) in &mut graph {
for (version_id, node) in versions { if node.is_peer && node.direct.is_none() {
if node.is_peer && node.direct.is_none() { node.resolved_ty = DependencyType::Peer;
node.resolved_ty = DependencyType::Peer; }
}
if node.resolved_ty == DependencyType::Peer { if node.resolved_ty == DependencyType::Peer {
tracing::warn!("peer dependency {name}@{version_id} was not resolved"); tracing::warn!("peer dependency {id} was not resolved");
}
} }
} }

View file

@ -1,4 +1,4 @@
use crate::manifest::target::TargetKind; use crate::{manifest::target::TargetKind, names::PackageNames};
use semver::Version; use semver::Version;
use serde_with::{DeserializeFromStr, SerializeDisplay}; use serde_with::{DeserializeFromStr, SerializeDisplay};
use std::{fmt::Display, str::FromStr}; use std::{fmt::Display, str::FromStr};
@ -34,6 +34,11 @@ impl VersionId {
pub fn from_escaped(s: &str) -> Result<Self, errors::VersionIdParseError> { pub fn from_escaped(s: &str) -> Result<Self, errors::VersionIdParseError> {
VersionId::from_str(s.replacen('+', " ", 1).as_str()) VersionId::from_str(s.replacen('+', " ", 1).as_str())
} }
/// Access the parts of the version ID
pub fn parts(&self) -> (&Version, &TargetKind) {
(&self.0, &self.1)
}
} }
impl Display for VersionId { impl Display for VersionId {
@ -86,6 +91,55 @@ impl schemars::JsonSchema for VersionId {
} }
} }
/// A package ID, which is a combination of a name and a version ID
#[derive(
Debug, SerializeDisplay, DeserializeFromStr, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
)]
pub struct PackageId(pub(crate) PackageNames, pub(crate) VersionId);
impl PackageId {
/// Creates a new package ID
pub fn new(names: PackageNames, version_id: VersionId) -> Self {
PackageId(names, version_id)
}
/// Access the name
pub fn name(&self) -> &PackageNames {
&self.0
}
/// Access the version ID
pub fn version_id(&self) -> &VersionId {
&self.1
}
/// Access the parts of the package ID
pub fn parts(&self) -> (&PackageNames, &VersionId) {
(&self.0, &self.1)
}
}
impl Display for PackageId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}@{}", self.0, self.1)
}
}
impl FromStr for PackageId {
type Err = errors::PackageIdParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let Some((names, version_id)) = s.split_once('@') else {
return Err(errors::PackageIdParseError::Malformed(s.to_string()));
};
let names = names.parse()?;
let version_id = version_id.parse()?;
Ok(PackageId(names, version_id))
}
}
/// Errors that can occur when using a version ID /// Errors that can occur when using a version ID
pub mod errors { pub mod errors {
use thiserror::Error; use thiserror::Error;
@ -106,4 +160,21 @@ pub mod errors {
#[error("malformed target")] #[error("malformed target")]
Target(#[from] crate::manifest::target::errors::TargetKindFromStr), Target(#[from] crate::manifest::target::errors::TargetKindFromStr),
} }
/// Errors that can occur when parsing a package ID
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum PackageIdParseError {
/// The package ID is malformed (not in the form `name@version`)
#[error("malformed package id {0}")]
Malformed(String),
/// The name is malformed
#[error("malformed name")]
Names(#[from] crate::names::errors::PackageNamesError),
/// The version ID is malformed
#[error("malformed version id")]
VersionId(#[from] VersionIdParseError),
}
} }

View file

@ -3,8 +3,8 @@ use crate::{
names::PackageNames, names::PackageNames,
reporters::DownloadProgressReporter, reporters::DownloadProgressReporter,
source::{ source::{
fs::PackageFS, refs::PackageRefs, specifiers::DependencySpecifiers, traits::*, fs::PackageFS, ids::VersionId, refs::PackageRefs, specifiers::DependencySpecifiers,
version_id::VersionId, traits::*,
}, },
}; };
use std::{collections::BTreeMap, fmt::Debug}; use std::{collections::BTreeMap, fmt::Debug};
@ -15,6 +15,8 @@ pub mod fs;
pub mod git; pub mod git;
/// Git index-based package source utilities /// Git index-based package source utilities
pub mod git_index; pub mod git_index;
/// Package identifiers for different contexts
pub mod ids;
/// The path package source /// The path package source
pub mod path; pub mod path;
/// The pesde package source /// The pesde package source
@ -25,8 +27,6 @@ pub mod refs;
pub mod specifiers; pub mod specifiers;
/// Traits for sources and packages /// Traits for sources and packages
pub mod traits; pub mod traits;
/// Version IDs
pub mod version_id;
/// The Wally package source /// The Wally package source
#[cfg(feature = "wally-compat")] #[cfg(feature = "wally-compat")]
pub mod wally; pub mod wally;

View file

@ -5,10 +5,10 @@ use crate::{
reporters::DownloadProgressReporter, reporters::DownloadProgressReporter,
source::{ source::{
fs::PackageFS, fs::PackageFS,
ids::VersionId,
path::pkg_ref::PathPackageRef, path::pkg_ref::PathPackageRef,
specifiers::DependencySpecifiers, specifiers::DependencySpecifiers,
traits::{DownloadOptions, PackageSource, ResolveOptions}, traits::{DownloadOptions, PackageSource, ResolveOptions},
version_id::VersionId,
ResolveResult, ResolveResult,
}, },
DEFAULT_INDEX_NAME, DEFAULT_INDEX_NAME,

View file

@ -5,8 +5,8 @@ use crate::{
source::{ source::{
fs::{store_in_cas, FSEntry, PackageFS}, fs::{store_in_cas, FSEntry, PackageFS},
git_index::{read_file, root_tree, GitBasedSource}, git_index::{read_file, root_tree, GitBasedSource},
ids::VersionId,
traits::{DownloadOptions, PackageSource, RefreshOptions, ResolveOptions}, traits::{DownloadOptions, PackageSource, RefreshOptions, ResolveOptions},
version_id::VersionId,
wally::{ wally::{
compat_util::get_target, compat_util::get_target,
manifest::{Realm, WallyManifest}, manifest::{Realm, WallyManifest},

View file

@ -5,9 +5,9 @@ use crate::{
reporters::DownloadProgressReporter, reporters::DownloadProgressReporter,
source::{ source::{
fs::PackageFS, fs::PackageFS,
ids::VersionId,
specifiers::DependencySpecifiers, specifiers::DependencySpecifiers,
traits::{DownloadOptions, PackageSource, ResolveOptions}, traits::{DownloadOptions, PackageSource, ResolveOptions},
version_id::VersionId,
workspace::pkg_ref::WorkspacePackageRef, workspace::pkg_ref::WorkspacePackageRef,
ResolveResult, ResolveResult,
}, },