feat: add scripts packages

This commit is contained in:
daimond113 2024-12-07 15:08:52 +01:00
parent 5ba8c5dbb4
commit ac74c57709
No known key found for this signature in database
GPG key ID: 3A8ECE51328B513C
23 changed files with 383 additions and 460 deletions


@ -9,6 +9,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added ### Added
- Add improved CLI styling by @daimond113 - Add improved CLI styling by @daimond113
- Install pesde dependencies before Wally to support scripts packages by @daimond113 - Install pesde dependencies before Wally to support scripts packages by @daimond113
- Support packages exporting scripts by @daimond113
### Removed
- Remove special scripts repo handling to favour standard packages by @daimond113
### Fixed ### Fixed
- Link dependencies before type extraction to support more use cases by @daimond113 - Link dependencies before type extraction to support more use cases by @daimond113


@ -3,7 +3,12 @@
href="https://pesde.daimond113.com/" href="https://pesde.daimond113.com/"
class="flex text-[var(--sl-color-text-accent)] hover:opacity-80" class="flex text-[var(--sl-color-text-accent)] hover:opacity-80"
> >
<svg viewBox="0 0 56 28" class="h-7" fill="none" xmlns="http://www.w3.org/2000/svg"> <svg
viewBox="0 0 56 28"
class="h-7"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<title>pesde</title> <title>pesde</title>
<path <path
d="M0 28V26.3156H2.25652V12.2361H0.0635639V10.5517H4.44947L4.48125 11.9819L3.78205 12.3315C4.41769 11.6746 5.16986 11.1661 6.03857 10.8059C6.92846 10.4245 7.82895 10.2338 8.74003 10.2338C9.863 10.2338 10.88 10.4775 11.7911 10.9648C12.7234 11.4522 13.4544 12.1726 13.9841 13.126C14.5349 14.0795 14.8104 15.2448 14.8104 16.6221C14.8104 18.0416 14.5138 19.26 13.9205 20.277C13.3272 21.2728 12.5327 22.0356 11.5368 22.5653C10.5622 23.095 9.5028 23.3598 8.35865 23.3598C7.72301 23.3598 7.11916 23.2751 6.54708 23.1056C5.99619 22.9361 5.50887 22.7242 5.08511 22.4699C4.66135 22.1945 4.34353 21.8873 4.13165 21.5483L4.60838 21.4529L4.5766 26.3156H7.02381V28H0ZM7.94549 21.6118C9.19558 21.6118 10.2444 21.2092 11.0919 20.4041C11.9394 19.5778 12.3632 18.3807 12.3632 16.8127C12.3632 15.2872 11.9606 14.1113 11.1555 13.2849C10.3503 12.4586 9.3333 12.0454 8.1044 12.0454C7.72301 12.0454 7.26747 12.1196 6.73777 12.2679C6.20807 12.395 5.67837 12.6069 5.14867 12.9035C4.61898 13.2002 4.17403 13.5922 3.81383 14.0795L4.5766 12.7446L4.60838 20.7219L3.8774 19.7367C4.42828 20.3299 5.06392 20.7961 5.78431 21.1351C6.5047 21.4529 7.2251 21.6118 7.94549 21.6118Z" d="M0 28V26.3156H2.25652V12.2361H0.0635639V10.5517H4.44947L4.48125 11.9819L3.78205 12.3315C4.41769 11.6746 5.16986 11.1661 6.03857 10.8059C6.92846 10.4245 7.82895 10.2338 8.74003 10.2338C9.863 10.2338 10.88 10.4775 11.7911 10.9648C12.7234 11.4522 13.4544 12.1726 13.9841 13.126C14.5349 14.0795 14.8104 15.2448 14.8104 16.6221C14.8104 18.0416 14.5138 19.26 13.9205 20.277C13.3272 21.2728 12.5327 22.0356 11.5368 22.5653C10.5622 23.095 9.5028 23.3598 8.35865 23.3598C7.72301 23.3598 7.11916 23.2751 6.54708 23.1056C5.99619 22.9361 5.50887 22.7242 5.08511 22.4699C4.66135 22.1945 4.34353 21.8873 4.13165 21.5483L4.60838 21.4529L4.5766 26.3156H7.02381V28H0ZM7.94549 21.6118C9.19558 21.6118 10.2444 21.2092 11.0919 20.4041C11.9394 19.5778 12.3632 18.3807 12.3632 16.8127C12.3632 15.2872 11.9606 14.1113 11.1555 13.2849C10.3503 12.4586 9.3333 12.0454 8.1044 12.0454C7.72301 12.0454 7.26747 12.1196 6.73777 12.2679C6.20807 12.395 5.67837 12.6069 5.14867 12.9035C4.61898 13.2002 4.17403 13.5922 3.81383 14.0795L4.5766 12.7446L4.60838 20.7219L3.8774 19.7367C4.42828 20.3299 5.06392 20.7961 5.78431 21.1351C6.5047 21.4529 7.2251 21.6118 7.94549 21.6118Z"
@ -22,7 +27,8 @@
fill="currentColor"></path> fill="currentColor"></path>
</svg> </svg>
</a> </a>
<span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span> <span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span
>
<a <a
class="font-medium text-[var(--sl-color-gray-2)] no-underline hover:opacity-80 md:text-lg" class="font-medium text-[var(--sl-color-gray-2)] no-underline hover:opacity-80 md:text-lg"
href="/">docs</a href="/">docs</a


@ -14,9 +14,6 @@ repository contains a list of scripts for different sync tools. If the tool
you are using is not supported, you can write your own script and submit a PR you are using is not supported, you can write your own script and submit a PR
to get it added. to get it added.
These scripts are automatically cloned into the `~/.pesde/scripts` folder and
kept up to date when you use pesde.
## Usage with Rojo ## Usage with Rojo
[Rojo](https://rojo.space/) is a popular tool for syncing files into Roblox [Rojo](https://rojo.space/) is a popular tool for syncing files into Roblox
@ -34,8 +31,8 @@ optionally submit a PR to help others using the same tool as you get started
quicker. quicker.
Scaffold your project with `pesde init`, select the `roblox` or `roblox_server` Scaffold your project with `pesde init`, select the `roblox` or `roblox_server`
target, and then replace the `.pesde/roblox_sync_config_generator.luau` script target, and then create a `.pesde/roblox_sync_config_generator.luau` script
with the one you want to use. and put its path in the manifest.
## Authoring packages ## Authoring packages


@ -27,13 +27,13 @@ environment we're targeting is `luau`.
```sh ```sh
pesde init pesde init
# What is the name of the project? <username>/hello_pesde # what is the name of the project? <username>/hello_pesde
# What is the description of the project? (leave empty for none) # what is the description of the project?
# Who are the authors of this project? (leave empty for none, comma separated) # who are the authors of this project?
# What is the repository URL of this project? (leave empty for none) # what is the repository URL of this project?
# What is the license of this project? (leave empty for none) MIT # what is the license of this project? MIT
# What environment are you targeting for your package? luau # what environment are you targeting for your package? luau
# Would you like to setup a default roblox_sync_config_generator script? No # would you like to set up default Roblox compatibility scripts? No
``` ```
The command will create a `pesde.toml` file in the current folder. Go ahead The command will create a `pesde.toml` file in the current folder. Go ahead


@ -55,19 +55,6 @@ is printed.
The default index is [`pesde-index`](https://github.com/pesde-pkg/index). The default index is [`pesde-index`](https://github.com/pesde-pkg/index).
### `pesde config scripts-repo`
```sh
pesde config scripts-repo [REPO]
```
Configures the scripts repository. If no repository is provided, the current
scripts repository is printed.
- `-r, --reset`: Resets the scripts repository.
The default scripts repository is [`pesde-scripts`](https://github.com/pesde-pkg/scripts).
## `pesde init` ## `pesde init`
Initializes a new pesde project in the current directory. Initializes a new pesde project in the current directory.


@ -190,7 +190,7 @@ for various sync tools.
<LinkCard <LinkCard
title="Example script for Rojo" title="Example script for Rojo"
description="An example script for generating configuration for Rojo." description="An example script for generating configuration for Rojo."
href="https://github.com/pesde-pkg/scripts/blob/master/lune/rojo/roblox_sync_config_generator.luau" href="https://github.com/pesde-pkg/scripts/blob/master/src/generators/rojo/sync_config.luau"
/> />
### `sourcemap_generator` ### `sourcemap_generator`
@ -205,7 +205,7 @@ through `process.args`.
<LinkCard <LinkCard
title="Example script for Rojo" title="Example script for Rojo"
description="An example script for generating configuration for Rojo." description="An example script for generating configuration for Rojo."
href="https://github.com/pesde-pkg/scripts/blob/master/lune/rojo/sourcemap_generator.luau" href="https://github.com/pesde-pkg/scripts/blob/master/src/generators/rojo/sourcemap.luau"
/> />
## `[indices]` ## `[indices]`


@ -1,22 +1,17 @@
use clap::Subcommand; use clap::Subcommand;
mod default_index; mod default_index;
mod scripts_repo;
#[derive(Debug, Subcommand)] #[derive(Debug, Subcommand)]
pub enum ConfigCommands { pub enum ConfigCommands {
/// Configuration for the default index /// Configuration for the default index
DefaultIndex(default_index::DefaultIndexCommand), DefaultIndex(default_index::DefaultIndexCommand),
/// Configuration for the scripts repository
ScriptsRepo(scripts_repo::ScriptsRepoCommand),
} }
impl ConfigCommands { impl ConfigCommands {
pub async fn run(self) -> anyhow::Result<()> { pub async fn run(self) -> anyhow::Result<()> {
match self { match self {
ConfigCommands::DefaultIndex(default_index) => default_index.run().await, ConfigCommands::DefaultIndex(default_index) => default_index.run().await,
ConfigCommands::ScriptsRepo(scripts_repo) => scripts_repo.run().await,
} }
} }
} }


@ -1,48 +0,0 @@
use crate::cli::{
config::{read_config, write_config, CliConfig},
home_dir,
};
use anyhow::Context;
use clap::Args;
use fs_err::tokio as fs;
#[derive(Debug, Args)]
pub struct ScriptsRepoCommand {
/// The new repo URL to set as default, don't pass any value to check the current default repo
#[arg(index = 1, value_parser = crate::cli::parse_gix_url)]
repo: Option<gix::Url>,
/// Resets the default repo to the default value
#[arg(short, long, conflicts_with = "repo")]
reset: bool,
}
impl ScriptsRepoCommand {
pub async fn run(self) -> anyhow::Result<()> {
let mut config = read_config().await?;
let repo = if self.reset {
Some(CliConfig::default().scripts_repo)
} else {
self.repo
};
match repo {
Some(repo) => {
config.scripts_repo = repo.clone();
write_config(&config).await?;
fs::remove_dir_all(home_dir()?.join("scripts"))
.await
.context("failed to remove scripts directory")?;
println!("scripts repo set to: {repo}");
}
None => {
println!("current scripts repo: {}", config.scripts_repo);
}
}
Ok(())
}
}


@ -1,30 +1,25 @@
use std::{path::Path, str::FromStr}; use crate::cli::config::read_config;
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use colored::Colorize; use colored::Colorize;
use inquire::validator::Validation; use inquire::validator::Validation;
use pesde::{ use pesde::{
errors::ManifestReadError, names::PackageName, scripts::ScriptName, Project, DEFAULT_INDEX_NAME, errors::ManifestReadError,
manifest::target::TargetKind,
names::PackageName,
source::{
git_index::GitBasedSource,
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
traits::PackageSource,
},
Project, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER,
}; };
use semver::VersionReq;
use crate::cli::{config::read_config, HOME_DIR}; use std::{collections::HashSet, str::FromStr};
use fs_err::tokio as fs;
#[derive(Debug, Args)] #[derive(Debug, Args)]
pub struct InitCommand {} pub struct InitCommand {}
fn script_contents(path: &Path) -> String {
format!(
r#"local process = require("@lune/process")
local home_dir = if process.os == "windows" then process.env.userprofile else process.env.HOME
require(home_dir .. {:?})"#,
format!("/{HOME_DIR}/scripts/{}", path.display())
)
}
impl InitCommand { impl InitCommand {
pub async fn run(self, project: Project) -> anyhow::Result<()> { pub async fn run(self, project: Project) -> anyhow::Result<()> {
match project.read_manifest().await { match project.read_manifest().await {
@ -39,7 +34,7 @@ impl InitCommand {
let mut manifest = toml_edit::DocumentMut::new(); let mut manifest = toml_edit::DocumentMut::new();
manifest["name"] = toml_edit::value( manifest["name"] = toml_edit::value(
inquire::Text::new("What is the name of the project?") inquire::Text::new("what is the name of the project?")
.with_validator(|name: &str| { .with_validator(|name: &str| {
Ok(match PackageName::from_str(name) { Ok(match PackageName::from_str(name) {
Ok(_) => Validation::Valid, Ok(_) => Validation::Valid,
@ -51,8 +46,8 @@ impl InitCommand {
); );
manifest["version"] = toml_edit::value("0.1.0"); manifest["version"] = toml_edit::value("0.1.0");
let description = let description = inquire::Text::new("what is the description of the project?")
inquire::Text::new("What is the description of the project? (leave empty for none)") .with_help_message("a short description of the project. leave empty for none")
.prompt() .prompt()
.unwrap(); .unwrap();
@ -60,9 +55,8 @@ impl InitCommand {
manifest["description"] = toml_edit::value(description); manifest["description"] = toml_edit::value(description);
} }
let authors = inquire::Text::new( let authors = inquire::Text::new("who are the authors of this project?")
"Who are the authors of this project? (leave empty for none, comma separated)", .with_help_message("comma separated list. leave empty for none")
)
.prompt() .prompt()
.unwrap(); .unwrap();
@ -76,9 +70,7 @@ impl InitCommand {
manifest["authors"] = toml_edit::value(authors); manifest["authors"] = toml_edit::value(authors);
} }
let repo = inquire::Text::new( let repo = inquire::Text::new("what is the repository URL of this project?")
"What is the repository URL of this project? (leave empty for none)",
)
.with_validator(|repo: &str| { .with_validator(|repo: &str| {
if repo.is_empty() { if repo.is_empty() {
return Ok(Validation::Valid); return Ok(Validation::Valid);
@ -89,15 +81,16 @@ impl InitCommand {
Err(e) => Validation::Invalid(e.to_string().into()), Err(e) => Validation::Invalid(e.to_string().into()),
}) })
}) })
.with_help_message("leave empty for none")
.prompt() .prompt()
.unwrap(); .unwrap();
if !repo.is_empty() { if !repo.is_empty() {
manifest["repository"] = toml_edit::value(repo); manifest["repository"] = toml_edit::value(repo);
} }
let license = let license = inquire::Text::new("what is the license of this project?")
inquire::Text::new("What is the license of this project? (leave empty for none)")
.with_initial_value("MIT") .with_initial_value("MIT")
.with_help_message("an SPDX license identifier. leave empty for none")
.prompt() .prompt()
.unwrap(); .unwrap();
if !license.is_empty() { if !license.is_empty() {
@ -105,77 +98,89 @@ impl InitCommand {
} }
let target_env = inquire::Select::new( let target_env = inquire::Select::new(
"What environment are you targeting for your package?", "what environment are you targeting for your package?",
vec!["roblox", "roblox_server", "lune", "luau"], TargetKind::VARIANTS.to_vec(),
) )
.prompt() .prompt()
.unwrap(); .unwrap();
manifest["target"].or_insert(toml_edit::Item::Table(toml_edit::Table::new())) manifest["target"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
["environment"] = toml_edit::value(target_env); ["environment"] = toml_edit::value(target_env.to_string());
if target_env == "roblox" let source = PesdePackageSource::new(read_config().await?.default_index);
|| target_env == "roblox_server"
|| inquire::Confirm::new(&format!(
"Would you like to setup a default {} script?",
ScriptName::RobloxSyncConfigGenerator
))
.prompt()
.unwrap()
{
let folder = project
.package_dir()
.join(concat!(".", env!("CARGO_PKG_NAME")));
fs::create_dir_all(&folder)
.await
.context("failed to create scripts folder")?;
fs::write(
folder.join(format!("{}.luau", ScriptName::RobloxSyncConfigGenerator)),
script_contents(Path::new(&format!(
"lune/rojo/{}.luau",
ScriptName::RobloxSyncConfigGenerator
))),
)
.await
.context("failed to write sync config generator script file")?;
#[cfg(feature = "wally-compat")]
fs::write(
folder.join(format!("{}.luau", ScriptName::SourcemapGenerator)),
script_contents(Path::new(&format!(
"lune/rojo/{}.luau",
ScriptName::SourcemapGenerator
))),
)
.await
.context("failed to write sourcemap generator script file")?;
let scripts =
manifest["scripts"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
scripts[&ScriptName::RobloxSyncConfigGenerator.to_string()] =
toml_edit::value(format!(
concat!(".", env!("CARGO_PKG_NAME"), "/{}.luau"),
ScriptName::RobloxSyncConfigGenerator
));
#[cfg(feature = "wally-compat")]
{
scripts[&ScriptName::SourcemapGenerator.to_string()] = toml_edit::value(format!(
concat!(".", env!("CARGO_PKG_NAME"), "/{}.luau"),
ScriptName::SourcemapGenerator
));
}
}
manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new())) manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
[DEFAULT_INDEX_NAME] = [DEFAULT_INDEX_NAME] = toml_edit::value(source.repo_url().to_bstring().to_string());
toml_edit::value(read_config().await?.default_index.to_bstring().to_string());
if target_env.is_roblox()
|| inquire::prompt_confirmation(
"would you like to setup default Roblox compatibility scripts?",
)
.unwrap()
{
PackageSource::refresh(&source, &project)
.await
.context("failed to refresh package source")?;
let config = source
.config(&project)
.await
.context("failed to get source config")?;
if let Some(scripts_pkg_name) = config.scripts_package {
let (v_id, pkg_ref) = source
.resolve(
&PesdeDependencySpecifier {
name: scripts_pkg_name,
version: VersionReq::STAR,
index: None,
target: None,
},
&project,
TargetKind::Lune,
&mut HashSet::new(),
)
.await
.context("failed to resolve scripts package")?
.1
.pop_last()
.context("scripts package not found")?;
let Some(scripts) = pkg_ref.target.scripts().filter(|s| !s.is_empty()) else {
anyhow::bail!("scripts package has no scripts. this is an issue with the index")
};
let scripts_field = &mut manifest["scripts"]
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
for script_name in scripts.keys() {
scripts_field[script_name] = toml_edit::value(format!(
"{SCRIPTS_LINK_FOLDER}/scripts/{script_name}.luau"
));
}
let field = &mut manifest["dev_dependencies"]
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()))["scripts"];
field["name"] = toml_edit::value(pkg_ref.name.to_string());
field["version"] = toml_edit::value(format!("^{}", v_id.version()));
field["target"] = toml_edit::value(v_id.target().to_string());
} else {
println!(
"{}",
"configured index hasn't a configured scripts package".red()
);
if !inquire::prompt_confirmation("initialize regardless?").unwrap() {
return Ok(());
}
}
}
project.write_manifest(manifest.to_string()).await?; project.write_manifest(manifest.to_string()).await?;
println!("{}", "initialized project".green()); println!(
"{}\n{}: run `install` to fully finish setup",
"initialized project".green(),
"tip".cyan().bold()
);
Ok(()) Ok(())
} }
} }


@ -1,6 +1,5 @@
use crate::cli::{ use crate::cli::{
bin_dir, files::make_executable, progress_bar, repos::update_scripts, run_on_workspace_members, bin_dir, files::make_executable, progress_bar, run_on_workspace_members, up_to_date_lockfile,
up_to_date_lockfile,
}; };
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
@ -139,9 +138,6 @@ impl InstallCommand {
} }
}; };
let project_2 = project.clone();
let update_scripts_handle = tokio::spawn(async move { update_scripts(&project_2).await });
println!( println!(
"\n{}\n", "\n{}\n",
format!("[now installing {} {}]", manifest.name, manifest.target) format!("[now installing {} {}]", manifest.name, manifest.target)
@ -204,8 +200,6 @@ impl InstallCommand {
.context("failed to build dependency graph")?; .context("failed to build dependency graph")?;
let graph = Arc::new(graph); let graph = Arc::new(graph);
update_scripts_handle.await??;
let bin_folder = bin_dir().await?; let bin_folder = bin_dir().await?;
let downloaded_graph = { let downloaded_graph = {


@ -101,15 +101,21 @@ impl PublishCommand {
} }
} }
let canonical_package_dir = project
.package_dir()
.canonicalize()
.context("failed to canonicalize package directory")?;
let mut archive = tokio_tar::Builder::new( let mut archive = tokio_tar::Builder::new(
async_compression::tokio::write::GzipEncoder::with_quality(vec![], Level::Best), async_compression::tokio::write::GzipEncoder::with_quality(vec![], Level::Best),
); );
let mut display_build_files: Vec<String> = vec![]; let mut display_build_files: Vec<String> = vec![];
let (lib_path, bin_path, target_kind) = ( let (lib_path, bin_path, scripts, target_kind) = (
manifest.target.lib_path().cloned(), manifest.target.lib_path().cloned(),
manifest.target.bin_path().cloned(), manifest.target.bin_path().cloned(),
manifest.target.scripts().cloned(),
manifest.target.kind(), manifest.target.kind(),
); );
@ -188,21 +194,24 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
continue; continue;
}; };
let export_path = relative_export_path let export_path = relative_export_path.to_path(&canonical_package_dir);
.to_path(project.package_dir())
.canonicalize()
.context(format!("failed to canonicalize {name}"))?;
if !export_path.exists() {
anyhow::bail!("{name} points to non-existent file");
}
if !export_path.is_file() { let contents = match fs::read_to_string(&export_path).await {
Ok(contents) => contents,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
anyhow::bail!("{name} does not exist");
}
Err(e) if e.kind() == std::io::ErrorKind::IsADirectory => {
anyhow::bail!("{name} must point to a file"); anyhow::bail!("{name} must point to a file");
} }
Err(e) => {
return Err(e).context(format!("failed to read {name}"));
}
};
let contents = fs::read_to_string(&export_path) let export_path = export_path
.await .canonicalize()
.context(format!("failed to read {name}"))?; .context(format!("failed to canonicalize {name}"))?;
if let Err(err) = full_moon::parse(&contents).map_err(|errs| { if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
errs.into_iter() errs.into_iter()
@ -223,7 +232,12 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
_ => anyhow::bail!("{name} must be within project directory"), _ => anyhow::bail!("{name} must be within project directory"),
}; };
if paths.insert(PathBuf::from(relative_export_path.as_str())) { if paths.insert(
export_path
.strip_prefix(&canonical_package_dir)
.unwrap()
.to_path_buf(),
) {
println!( println!(
"{}: {name} was not included, adding {relative_export_path}", "{}: {name} was not included, adding {relative_export_path}",
"warn".yellow().bold() "warn".yellow().bold()
@ -270,6 +284,50 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
} }
} }
if let Some(scripts) = scripts {
for (name, path) in scripts {
let script_path = path.to_path(&canonical_package_dir);
let contents = match fs::read_to_string(&script_path).await {
Ok(contents) => contents,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
anyhow::bail!("script {name} does not exist");
}
Err(e) if e.kind() == std::io::ErrorKind::IsADirectory => {
anyhow::bail!("script {name} must point to a file");
}
Err(e) => {
return Err(e).context(format!("failed to read script {name}"));
}
};
let script_path = script_path
.canonicalize()
.context(format!("failed to canonicalize script {name}"))?;
if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
errs.into_iter()
.map(|err| err.to_string())
.collect::<Vec<_>>()
.join(", ")
}) {
anyhow::bail!("script {name} is not a valid Luau file: {err}");
}
if paths.insert(
script_path
.strip_prefix(&canonical_package_dir)
.unwrap()
.to_path_buf(),
) {
println!(
"{}: script {name} was not included, adding {path}",
"warn".yellow().bold()
);
}
}
}
for relative_path in &paths { for relative_path in &paths {
let path = project.package_dir().join(relative_path); let path = project.package_dir().join(relative_path);


@ -1,4 +1,4 @@
use crate::cli::{repos::update_scripts, up_to_date_lockfile}; use crate::cli::up_to_date_lockfile;
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use futures::{StreamExt, TryStreamExt}; use futures::{StreamExt, TryStreamExt};
@ -27,10 +27,6 @@ pub struct RunCommand {
impl RunCommand { impl RunCommand {
pub async fn run(self, project: Project) -> anyhow::Result<()> { pub async fn run(self, project: Project) -> anyhow::Result<()> {
let run = |root: PathBuf, file_path: PathBuf| { let run = |root: PathBuf, file_path: PathBuf| {
let fut = update_scripts(&project);
async move {
fut.await.expect("failed to update scripts");
let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile"); let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
caller caller
.write_all( .write_all(
@ -54,7 +50,6 @@ impl RunCommand {
drop(caller); drop(caller);
std::process::exit(status.code().unwrap_or(1)) std::process::exit(status.code().unwrap_or(1))
}
}; };
let Some(package_or_script) = self.package_or_script else { let Some(package_or_script) = self.package_or_script else {
@ -62,8 +57,7 @@ impl RunCommand {
run( run(
project.package_dir().to_owned(), project.package_dir().to_owned(),
script_path.to_path(project.package_dir()), script_path.to_path(project.package_dir()),
) );
.await;
return Ok(()); return Ok(());
} }
@ -105,7 +99,7 @@ impl RunCommand {
let path = bin_path.to_path(&container_folder); let path = bin_path.to_path(&container_folder);
run(path.clone(), path).await; run(path.clone(), path);
return Ok(()); return Ok(());
} }
} }
@ -115,8 +109,7 @@ impl RunCommand {
run( run(
project.package_dir().to_path_buf(), project.package_dir().to_path_buf(),
script_path.to_path(project.package_dir()), script_path.to_path(project.package_dir()),
) );
.await;
return Ok(()); return Ok(());
} }
}; };
@ -177,7 +170,7 @@ impl RunCommand {
project.package_dir().to_path_buf() project.package_dir().to_path_buf()
}; };
run(root, path).await; run(root, path);
Ok(()) Ok(())
} }


@ -1,4 +1,4 @@
use crate::cli::{progress_bar, repos::update_scripts, run_on_workspace_members}; use crate::cli::{progress_bar, run_on_workspace_members};
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use colored::Colorize; use colored::Colorize;
@ -37,8 +37,6 @@ impl UpdateCommand {
.context("failed to build dependency graph")?; .context("failed to build dependency graph")?;
let graph = Arc::new(graph); let graph = Arc::new(graph);
update_scripts(&project).await?;
project project
.write_lockfile(Lockfile { .write_lockfile(Lockfile {
name: manifest.name, name: manifest.name,


@ -4,17 +4,13 @@ use fs_err::tokio as fs;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct CliConfig { pub struct CliConfig {
#[serde( #[serde(
serialize_with = "crate::util::serialize_gix_url", serialize_with = "crate::util::serialize_gix_url",
deserialize_with = "crate::util::deserialize_gix_url" deserialize_with = "crate::util::deserialize_gix_url"
)] )]
pub default_index: gix::Url, pub default_index: gix::Url,
#[serde(
serialize_with = "crate::util::serialize_gix_url",
deserialize_with = "crate::util::deserialize_gix_url"
)]
pub scripts_repo: gix::Url,
pub tokens: Tokens, pub tokens: Tokens,
@ -26,7 +22,6 @@ impl Default for CliConfig {
fn default() -> Self { fn default() -> Self {
Self { Self {
default_index: "https://github.com/pesde-pkg/index".try_into().unwrap(), default_index: "https://github.com/pesde-pkg/index".try_into().unwrap(),
scripts_repo: "https://github.com/pesde-pkg/scripts".try_into().unwrap(),
tokens: Tokens(Default::default()), tokens: Tokens(Default::default()),


@ -24,7 +24,6 @@ pub mod auth;
pub mod commands; pub mod commands;
pub mod config; pub mod config;
pub mod files; pub mod files;
pub mod repos;
#[cfg(feature = "version-management")] #[cfg(feature = "version-management")]
pub mod version; pub mod version;


@ -1,143 +0,0 @@
use crate::{
cli::{config::read_config, home_dir},
util::authenticate_conn,
};
use anyhow::Context;
use fs_err::tokio as fs;
use gix::remote::{fetch::Shallow, Direction};
use pesde::Project;
use std::{path::Path, sync::atomic::AtomicBool};
use tokio::{runtime::Handle, task::spawn_blocking};
async fn update_repo<P: AsRef<Path>>(
name: &str,
path: P,
url: gix::Url,
project: &Project,
) -> anyhow::Result<()> {
let path = path.as_ref();
let should_update = path.exists();
let (repo, oid) = if should_update {
let repo = gix::open(path).context(format!("failed to open {name} repository"))?;
let remote = repo
.find_default_remote(Direction::Fetch)
.context(format!("missing default remote of {name} repository"))?
.context(format!(
"failed to find default remote of {name} repository"
))?;
let mut connection = remote.connect(Direction::Fetch).context(format!(
"failed to connect to default remote of {name} repository"
))?;
authenticate_conn(&mut connection, project.auth_config());
let results = connection
.prepare_fetch(gix::progress::Discard, Default::default())
.context(format!("failed to prepare {name} repository fetch"))?
.with_shallow(Shallow::Deepen(1))
.receive(gix::progress::Discard, &false.into())
.context(format!("failed to receive new {name} repository contents"))?;
let remote_ref = results
.ref_map
.remote_refs
.first()
.context(format!("failed to get remote refs of {name} repository"))?;
let unpacked = remote_ref.unpack();
let oid = unpacked
.1
.or(unpacked.2)
.context("couldn't find oid in remote ref")?;
(repo, gix::ObjectId::from(oid))
} else {
fs::create_dir_all(path)
.await
.context(format!("failed to create {name} directory"))?;
let repo = gix::prepare_clone(url, path)
.context(format!("failed to prepare {name} repository clone"))?
.with_shallow(Shallow::Deepen(1))
.fetch_only(gix::progress::Discard, &false.into())
.context(format!("failed to fetch and checkout {name} repository"))?
.0;
let oid = {
let mut head = repo
.head()
.context(format!("failed to get {name} repository head"))?;
let obj = head
.peel_to_object_in_place()
.context(format!("failed to peel {name} repository head to object"))?;
obj.id
};
(repo, oid)
};
let tree = repo
.find_object(oid)
.context(format!("failed to find {name} repository tree"))?
.peel_to_tree()
.context(format!("failed to peel {name} repository object to tree"))?;
let mut index = gix::index::File::from_state(
gix::index::State::from_tree(&tree.id, &repo.objects, Default::default()).context(
format!("failed to create index state from {name} repository tree"),
)?,
repo.index_path(),
);
let opts = gix::worktree::state::checkout::Options {
overwrite_existing: true,
destination_is_initially_empty: !should_update,
..Default::default()
};
gix::worktree::state::checkout(
&mut index,
repo.work_dir().context(format!("{name} repo is bare"))?,
repo.objects
.clone()
.into_arc()
.context("failed to clone objects")?,
&gix::progress::Discard,
&gix::progress::Discard,
&false.into(),
opts,
)
.context(format!("failed to checkout {name} repository"))?;
index
.write(gix::index::write::Options::default())
.context("failed to write index")
}
static SCRIPTS_UPDATED: AtomicBool = AtomicBool::new(false);
pub async fn update_scripts(project: &Project) -> anyhow::Result<()> {
if SCRIPTS_UPDATED.swap(true, std::sync::atomic::Ordering::Relaxed) {
return Ok(());
}
let home_dir = home_dir()?;
let config = read_config().await?;
let project = project.clone();
spawn_blocking(move || {
Handle::current().block_on(update_repo(
"scripts",
home_dir.join("scripts"),
config.scripts_repo,
&project,
))
})
.await??;
Ok(())
}


@ -148,6 +148,7 @@ pub mod errors {
/// An error that can occur when downloading and linking dependencies /// An error that can occur when downloading and linking dependencies
#[derive(Debug, Error)] #[derive(Debug, Error)]
#[non_exhaustive]
pub enum DownloadAndLinkError<E> { pub enum DownloadAndLinkError<E> {
/// An error occurred while downloading the graph /// An error occurred while downloading the graph
#[error("error downloading graph")] #[error("error downloading graph")]


@ -50,6 +50,8 @@ pub const DEFAULT_INDEX_NAME: &str = "default";
/// The name of the packages container /// The name of the packages container
pub const PACKAGES_CONTAINER_NAME: &str = ".pesde"; pub const PACKAGES_CONTAINER_NAME: &str = ".pesde";
pub(crate) const LINK_LIB_NO_FILE_FOUND: &str = "____pesde_no_export_file_found"; pub(crate) const LINK_LIB_NO_FILE_FOUND: &str = "____pesde_no_export_file_found";
/// The folder in which scripts are linked
pub const SCRIPTS_LINK_FOLDER: &str = ".pesde";
/// Struct containing the authentication configuration /// Struct containing the authentication configuration
#[derive(Debug, Default, Clone)] #[derive(Debug, Default, Clone)]


@ -199,12 +199,30 @@ pub fn get_bin_require_path(
luau_style_path(&path) luau_style_path(&path)
} }
/// Generate a linking module for a script
pub fn generate_script_linking_module(require_path: &str) -> String {
format!(r#"return require({require_path})"#)
}
/// Get the require path for a script
pub fn get_script_require_path(
base_dir: &Path,
script_file: &RelativePathBuf,
destination_dir: &Path,
) -> String {
let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
let path = script_file.to_path(path);
luau_style_path(&path)
}
/// Errors for the linking module utilities /// Errors for the linking module utilities
pub mod errors { pub mod errors {
use thiserror::Error; use thiserror::Error;
/// An error occurred while getting the require path for a library /// An error occurred while getting the require path for a library
#[derive(Debug, Error)] #[derive(Debug, Error)]
#[non_exhaustive]
pub enum GetLibRequirePath { pub enum GetLibRequirePath {
/// The path for the RobloxPlaceKind could not be found /// The path for the RobloxPlaceKind could not be found
#[error("could not find the path for the RobloxPlaceKind {0}")] #[error("could not find the path for the RobloxPlaceKind {0}")]


@ -1,6 +1,6 @@
use crate::{ use crate::{
linking::generator::get_file_types, linking::generator::get_file_types,
lockfile::DownloadedGraph, lockfile::{DownloadedDependencyGraphNode, DownloadedGraph},
manifest::Manifest, manifest::Manifest,
names::PackageNames, names::PackageNames,
scripts::{execute_script, ScriptName}, scripts::{execute_script, ScriptName},
@ -9,7 +9,7 @@ use crate::{
traits::PackageRef, traits::PackageRef,
version_id::VersionId, version_id::VersionId,
}, },
Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME, Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
}; };
use fs_err::tokio as fs; use fs_err::tokio as fs;
use futures::future::try_join_all; use futures::future::try_join_all;
@ -157,14 +157,93 @@ impl Project {
self.link(graph, &manifest, &Arc::new(package_types)).await self.link(graph, &manifest, &Arc::new(package_types)).await
} }
#[allow(clippy::too_many_arguments)]
async fn link_files(
&self,
base_folder: &Path,
container_folder: &Path,
root_container_folder: &Path,
relative_container_folder: &Path,
node: &DownloadedDependencyGraphNode,
name: &PackageNames,
version_id: &VersionId,
alias: &str,
package_types: &HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>,
manifest: &Manifest,
) -> Result<(), errors::LinkingError> {
static NO_TYPES: Vec<String> = Vec::new();
if let Some(lib_file) = node.target.lib_path() {
let lib_module = generator::generate_lib_linking_module(
&generator::get_lib_require_path(
&node.target.kind(),
base_folder,
lib_file,
container_folder,
node.node.pkg_ref.use_new_structure(),
root_container_folder,
relative_container_folder,
manifest,
)?,
package_types
.get(name)
.and_then(|v| v.get(version_id))
.unwrap_or(&NO_TYPES),
);
write_cas(
base_folder.join(format!("{alias}.luau")),
self.cas_dir(),
&lib_module,
)
.await?;
}
if let Some(bin_file) = node.target.bin_path() {
let bin_module = generator::generate_bin_linking_module(
container_folder,
&generator::get_bin_require_path(base_folder, bin_file, container_folder),
);
write_cas(
base_folder.join(format!("{alias}.bin.luau")),
self.cas_dir(),
&bin_module,
)
.await?;
}
if let Some(scripts) = node.target.scripts() {
let scripts_base =
create_and_canonicalize(self.package_dir().join(SCRIPTS_LINK_FOLDER).join(alias))
.await?;
for (script_name, script_path) in scripts {
let script_module =
generator::generate_script_linking_module(&generator::get_script_require_path(
&scripts_base,
script_path,
container_folder,
));
write_cas(
scripts_base.join(format!("{script_name}.luau")),
self.cas_dir(),
&script_module,
)
.await?;
}
}
Ok(())
}
async fn link( async fn link(
&self, &self,
graph: &DownloadedGraph, graph: &DownloadedGraph,
manifest: &Arc<Manifest>, manifest: &Arc<Manifest>,
package_types: &Arc<HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>>, package_types: &Arc<HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>>,
) -> Result<(), errors::LinkingError> { ) -> Result<(), errors::LinkingError> {
static NO_TYPES: Vec<String> = Vec::new();
try_join_all(graph.iter().flat_map(|(name, versions)| { try_join_all(graph.iter().flat_map(|(name, versions)| {
versions.iter().map(|(version_id, node)| { versions.iter().map(|(version_id, node)| {
let name = name.clone(); let name = name.clone();
@ -186,46 +265,20 @@ impl Project {
version_id.version(), version_id.version(),
); );
if let Some((alias, _, _)) = &node.node.direct.as_ref() { if let Some((alias, _, _)) = &node.node.direct {
if let Some(lib_file) = node.target.lib_path() { self.link_files(
write_cas(
base_folder.join(format!("{alias}.luau")),
self.cas_dir(),
&generator::generate_lib_linking_module(
&generator::get_lib_require_path(
&node.target.kind(),
&base_folder, &base_folder,
lib_file,
&container_folder, &container_folder,
node.node.pkg_ref.use_new_structure(),
&base_folder, &base_folder,
container_folder.strip_prefix(&base_folder).unwrap(), container_folder.strip_prefix(&base_folder).unwrap(),
node,
&name,
version_id,
alias,
&package_types,
&manifest, &manifest,
)?,
package_types
.get(&name)
.and_then(|v| v.get(version_id))
.unwrap_or(&NO_TYPES),
),
) )
.await?; .await?;
};
if let Some(bin_file) = node.target.bin_path() {
write_cas(
base_folder.join(format!("{alias}.bin.luau")),
self.cas_dir(),
&generator::generate_bin_linking_module(
&container_folder,
&generator::get_bin_require_path(
&base_folder,
bin_file,
&container_folder,
),
),
)
.await?;
}
} }
(container_folder, base_folder) (container_folder, base_folder)
@ -244,10 +297,6 @@ impl Project {
)); ));
}; };
let Some(lib_file) = dependency_node.target.lib_path() else {
continue;
};
let base_folder = create_and_canonicalize( let base_folder = create_and_canonicalize(
self.package_dir().join( self.package_dir().join(
version_id version_id
@ -272,25 +321,17 @@ impl Project {
) )
.await?; .await?;
write_cas( self.link_files(
linker_folder.join(format!("{dependency_alias}.luau")),
self.cas_dir(),
&generator::generate_lib_linking_module(
&generator::get_lib_require_path(
&dependency_node.target.kind(),
&linker_folder, &linker_folder,
lib_file,
&container_folder, &container_folder,
dependency_node.node.pkg_ref.use_new_structure(),
&node_packages_folder, &node_packages_folder,
container_folder.strip_prefix(&base_folder).unwrap(), container_folder.strip_prefix(&base_folder).unwrap(),
dependency_node,
dependency_name,
dependency_version_id,
dependency_alias,
&package_types,
&manifest, &manifest,
)?,
package_types
.get(dependency_name)
.and_then(|v| v.get(dependency_version_id))
.unwrap_or(&NO_TYPES),
),
) )
.await?; .await?;
} }


@ -2,7 +2,7 @@ use relative_path::RelativePathBuf;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay}; use serde_with::{DeserializeFromStr, SerializeDisplay};
use std::{ use std::{
collections::BTreeSet, collections::{BTreeMap, BTreeSet},
fmt::{Display, Formatter}, fmt::{Display, Formatter},
str::FromStr, str::FromStr,
}; };
@ -68,6 +68,11 @@ impl TargetKind {
format!("{dependency}_packages") format!("{dependency}_packages")
} }
/// Returns whether this target is a Roblox target
pub fn is_roblox(&self) -> bool {
matches!(self, TargetKind::Roblox | TargetKind::RobloxServer)
}
} }
/// A target of a package /// A target of a package
@ -77,7 +82,7 @@ pub enum Target {
/// A Roblox target /// A Roblox target
Roblox { Roblox {
/// The path to the lib export file /// The path to the lib export file
#[serde(default)] #[serde(default, skip_serializing_if = "Option::is_none")]
lib: Option<RelativePathBuf>, lib: Option<RelativePathBuf>,
/// The files to include in the sync tool's config /// The files to include in the sync tool's config
#[serde(default)] #[serde(default)]
@ -86,7 +91,7 @@ pub enum Target {
/// A Roblox server target /// A Roblox server target
RobloxServer { RobloxServer {
/// The path to the lib export file /// The path to the lib export file
#[serde(default)] #[serde(default, skip_serializing_if = "Option::is_none")]
lib: Option<RelativePathBuf>, lib: Option<RelativePathBuf>,
/// The files to include in the sync tool's config /// The files to include in the sync tool's config
#[serde(default)] #[serde(default)]
@ -95,19 +100,22 @@ pub enum Target {
/// A Lune target /// A Lune target
Lune { Lune {
/// The path to the lib export file /// The path to the lib export file
#[serde(default)] #[serde(default, skip_serializing_if = "Option::is_none")]
lib: Option<RelativePathBuf>, lib: Option<RelativePathBuf>,
/// The path to the bin export file /// The path to the bin export file
#[serde(default)] #[serde(default, skip_serializing_if = "Option::is_none")]
bin: Option<RelativePathBuf>, bin: Option<RelativePathBuf>,
/// The exported scripts
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
scripts: BTreeMap<String, RelativePathBuf>,
}, },
/// A Luau target /// A Luau target
Luau { Luau {
/// The path to the lib export file /// The path to the lib export file
#[serde(default)] #[serde(default, skip_serializing_if = "Option::is_none")]
lib: Option<RelativePathBuf>, lib: Option<RelativePathBuf>,
/// The path to the bin export file /// The path to the bin export file
#[serde(default)] #[serde(default, skip_serializing_if = "Option::is_none")]
bin: Option<RelativePathBuf>, bin: Option<RelativePathBuf>,
}, },
} }
@ -151,6 +159,14 @@ impl Target {
_ => None, _ => None,
} }
} }
/// Returns the scripts exported by this target
pub fn scripts(&self) -> Option<&BTreeMap<String, RelativePathBuf>> {
match self {
Target::Lune { scripts, .. } => Some(scripts),
_ => None,
}
}
} }
impl Display for Target { impl Display for Target {
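For reference, a rough sketch of the new `Target` surface, assuming `Target` and `TargetKind` are reachable at `pesde::manifest::target` as the imports elsewhere in this commit suggest; the script name and path are illustrative only:

```rust
use pesde::manifest::target::{Target, TargetKind};
use relative_path::RelativePathBuf;
use std::collections::BTreeMap;

fn main() {
    // Only the Lune target carries exported scripts in this change.
    let target = Target::Lune {
        lib: None,
        bin: None,
        scripts: BTreeMap::from([(
            "roblox_sync_config_generator".to_string(),
            RelativePathBuf::from("src/generators/rojo/sync_config.luau"),
        )]),
    };

    // `scripts()` yields the map for Lune targets and `None` for the others.
    assert_eq!(target.scripts().map(|s| s.len()), Some(1));

    // `is_roblox()` is what the reworked `pesde init` checks before offering
    // the default compatibility scripts.
    assert!(!target.kind().is_roblox());
    assert!(TargetKind::Roblox.is_roblox());
}
```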


@ -17,7 +17,7 @@ use crate::{
target::{Target, TargetKind}, target::{Target, TargetKind},
DependencyType, DependencyType,
}, },
names::PackageNames, names::{PackageName, PackageNames},
source::{ source::{
fs::{store_in_cas, FSEntry, PackageFS}, fs::{store_in_cas, FSEntry, PackageFS},
git_index::{read_file, root_tree, GitBasedSource}, git_index::{read_file, root_tree, GitBasedSource},
@ -316,6 +316,9 @@ pub struct IndexConfig {
/// The maximum size of an archive in bytes /// The maximum size of an archive in bytes
#[serde(default = "default_archive_size")] #[serde(default = "default_archive_size")]
pub max_archive_size: usize, pub max_archive_size: usize,
/// The package to use for default script implementations
#[serde(default)]
pub scripts_package: Option<PackageName>,
} }
impl IndexConfig { impl IndexConfig {
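This optional field is what the reworked `pesde init` reads to decide whether default compatibility scripts can be offered. A hedged sketch of consuming it, assuming `IndexConfig` is reachable at `pesde::source::pesde::IndexConfig` (path inferred, not verified):

```rust
use pesde::source::pesde::IndexConfig;

// Hypothetical helper mirroring what `pesde init` does with the field:
// a named scripts package means the default setup can proceed, otherwise
// the caller has to warn and ask whether to continue anyway.
fn default_scripts_available(config: &IndexConfig) -> bool {
    match &config.scripts_package {
        Some(name) => {
            println!("index provides scripts package {name}");
            true
        }
        None => {
            println!("the configured index has no scripts package configured");
            false
        }
    }
}
```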


@ -108,6 +108,7 @@ pub mod errors {
/// Errors that can occur when parsing a version type /// Errors that can occur when parsing a version type
#[derive(Debug, Error)] #[derive(Debug, Error)]
#[non_exhaustive]
pub enum VersionTypeFromStr { pub enum VersionTypeFromStr {
/// The version type is invalid /// The version type is invalid
#[error("invalid version type {0}")] #[error("invalid version type {0}")]
@ -116,6 +117,7 @@ pub mod errors {
/// Errors that can occur when parsing a version type or requirement /// Errors that can occur when parsing a version type or requirement
#[derive(Debug, Error)] #[derive(Debug, Error)]
#[non_exhaustive]
pub enum VersionTypeOrReqFromStr { pub enum VersionTypeOrReqFromStr {
/// The version requirement is invalid /// The version requirement is invalid
#[error("invalid version requirement {0}")] #[error("invalid version requirement {0}")]