merge: lune-org/lune/main->feature/sched-return

This commit is contained in:
Erica Marigold 2024-06-10 15:11:58 +05:30
commit 4c41ad511b
No known key found for this signature in database
GPG key ID: 2768CC0C23D245D1
67 changed files with 3743 additions and 364 deletions

View file

@ -88,17 +88,20 @@ jobs:
- name: Build
run: |
cargo build \
--workspace \
--locked --all-features \
--target ${{ matrix.cargo-target }}
- name: Lint
run: |
cargo clippy \
--workspace \
--locked --all-features \
--target ${{ matrix.cargo-target }}
- name: Test
run: |
cargo test \
--lib --workspace \
--locked --all-features \
--target ${{ matrix.cargo-target }}

View file

@ -8,6 +8,61 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## Unreleased
### Added
- Added a builtin API for hashing and calculating HMACs as part of the `serde` library
Basic usage:
```lua
local serde = require("@lune/serde")
local hash = serde.hash("sha256", "a message to hash")
local hmac = serde.hmac("sha256", "a message to hash", "a secret string")
print(hash)
print(hmac)
```
The returned hashes are sequences of lowercase hexadecimal digits. The following algorithms are supported:
`md5`, `sha1`, `sha224`, `sha256`, `sha384`, `sha512`, `sha3-224`, `sha3-256`, `sha3-384`, `sha3-512`, `blake3`
- Added two new options to `luau.load`:
- `codegenEnabled` - whether or not codegen should be enabled for the loaded chunk.
- `injectGlobals` - whether or not to inject globals into a passed `environment`.
By default, globals are injected and codegen is disabled.
Check the documentation for the `luau` standard library for more information.
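Basic usage (a sketch; the chunk sources and the `injected` value are made up for illustration):
```lua
local luau = require("@lune/luau")

-- Codegen can be opted into for plain chunks
local plain = luau.load("return 1 + 1", {
	codegenEnabled = true,
})
print(plain()) --> 2

-- Custom environments no longer have to include the default globals
local scoped = luau.load("return injected", {
	environment = { injected = "hello" },
	injectGlobals = false,
})
print(scoped()) --> hello
```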
### Changed
- Sandboxing and codegen in the Luau VM is now fully enabled, resulting in code execution that can be 2x faster or more.
This should not result in any behavior differences in Lune, but if it does, please open an issue.
- Improved formatting of custom error objects (such as when `fs.readFile` returns an error) when printed or formatted using `stdio.format`.
### Fixed
- Fixed `__type` and `__tostring` metamethods on userdatas and tables not being respected when printed or formatted using `stdio.format`.
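For example, a table like the one below should now display its `__type` and `__tostring` metavalues (a sketch; exact output styling may differ):
```lua
local stdio = require("@lune/stdio")

local thing = setmetatable({}, {
	__type = "MyThing",
	__tostring = function()
		return "MyThing<example>"
	end,
})

-- Both of these should now display something along
-- the lines of "<MyThing(MyThing<example>)>"
print(thing)
print(stdio.format(thing))
```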
## `0.8.5` - June 1st, 2024
### Changed
- Improved table pretty formatting when using `print`, `warn`, and `stdio.format`:
- Keys are sorted numerically / alphabetically when possible.
- Keys of different types are put in distinct sections for mixed tables.
- Tables that are arrays no longer display their keys.
- Empty tables are no longer spread across lines.
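Illustrative examples of the affected cases (the styled output itself is not reproduced here):
```lua
print({ "a", "b", "c" }) -- array: printed without keys
print({}) -- empty: stays on a single line
print({ 1, 2, foo = true, [10] = "x" }) -- mixed: keys grouped into sections and sorted
```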
### Fixed
- Fixed formatted values in tables not being separated by newlines.
- Fixed panicking (crashing) when using `process.spawn` with a program that does not exist.
- Fixed `instance:SetAttribute("name", nil)` throwing an error and not removing the attribute.
## `0.8.4` - May 12th, 2024
### Added

Cargo.lock (generated, 565 changes)

File diff suppressed because it is too large

View file

@ -16,6 +16,7 @@ members = [
"crates/lune-std-stdio",
"crates/lune-std-task",
"crates/lune-utils",
"crates/mlua-luau-scheduler",
]
# Profile for building the release binary, with the following options set:

View file

@ -1,4 +1,4 @@
[tools]
luau-lsp = "JohnnyMorganz/luau-lsp@1.29.0"
luau-lsp = "JohnnyMorganz/luau-lsp@1.29.1"
selene = "Kampfkarren/selene@0.27.1"
stylua = "JohnnyMorganz/StyLua@0.20.0"

View file

@ -1,6 +1,6 @@
[package]
name = "lune-roblox"
version = "0.1.0"
version = "0.1.1"
edition = "2021"
license = "MPL-2.0"
repository = "https://github.com/lune-org/lune"

View file

@ -155,13 +155,18 @@ pub fn add_methods<'lua, M: LuaUserDataMethods<'lua, Instance>>(m: &mut M) {
|lua, this, (attribute_name, lua_value): (String, LuaValue)| {
ensure_not_destroyed(this)?;
ensure_valid_attribute_name(&attribute_name)?;
match lua_value.lua_to_dom_value(lua, None) {
Ok(dom_value) => {
ensure_valid_attribute_value(&dom_value)?;
this.set_attribute(attribute_name, dom_value);
Ok(())
if lua_value.is_nil() || lua_value.is_null() {
this.remove_attribute(attribute_name);
Ok(())
} else {
match lua_value.lua_to_dom_value(lua, None) {
Ok(dom_value) => {
ensure_valid_attribute_value(&dom_value)?;
this.set_attribute(attribute_name, dom_value);
Ok(())
}
Err(e) => Err(e.into()),
}
Err(e) => Err(e.into()),
}
},
);

View file

@ -442,6 +442,29 @@ impl Instance {
}
}
/**
Removes an attribute from the instance.
Note that this does not have a direct equivalent in the Roblox engine API;
the closest is `instance:SetAttribute(name, nil)`. Keeping it separate from
`set_attribute` lets `set_attribute` be more ergonomic and not require an
`Option<DomValue>` for the value argument.
*/
pub fn remove_attribute(&self, name: impl AsRef<str>) {
let mut dom = INTERNAL_DOM.lock().expect("Failed to lock document");
let inst = dom
.get_by_ref_mut(self.dom_ref)
.expect("Failed to find instance in document");
if let Some(DomValue::Attributes(attributes)) =
inst.properties.get_mut(PROPERTY_NAME_ATTRIBUTES)
{
attributes.remove(name.as_ref());
if attributes.is_empty() {
inst.properties.remove(PROPERTY_NAME_ATTRIBUTES);
}
}
}
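On the Lua side this corresponds to calling `SetAttribute` with `nil`, the case fixed in this commit (a sketch; the instance and attribute names are illustrative):
```lua
local roblox = require("@lune/roblox")

local part = roblox.Instance.new("Part")
part:SetAttribute("Health", 100)

-- Passing nil now removes the attribute instead of erroring
part:SetAttribute("Health", nil)
assert(part:GetAttribute("Health") == nil)
```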
/**
Adds a tag to the instance.

View file

@ -44,26 +44,41 @@ fn load_source<'lua>(
(source, options): (LuaString<'lua>, LuauLoadOptions),
) -> LuaResult<LuaFunction<'lua>> {
let mut chunk = lua.load(source.as_bytes()).set_name(options.debug_name);
let env_changed = options.environment.is_some();
if let Some(environment) = options.environment {
let environment_with_globals = lua.create_table()?;
if let Some(custom_environment) = options.environment {
let environment = lua.create_table()?;
if let Some(meta) = environment.get_metatable() {
environment_with_globals.set_metatable(Some(meta));
// Inject all globals into the environment
if options.inject_globals {
for pair in lua.globals().pairs() {
let (key, value): (LuaValue, LuaValue) = pair?;
environment.set(key, value)?;
}
if let Some(global_metatable) = lua.globals().get_metatable() {
environment.set_metatable(Some(global_metatable));
}
} else if let Some(custom_metatable) = custom_environment.get_metatable() {
// Since we don't need to set the global metatable,
// we can just set a custom metatable if it exists
environment.set_metatable(Some(custom_metatable));
}
for pair in lua.globals().pairs() {
// Inject the custom environment
for pair in custom_environment.pairs() {
let (key, value): (LuaValue, LuaValue) = pair?;
environment_with_globals.set(key, value)?;
environment.set(key, value)?;
}
for pair in environment.pairs() {
let (key, value): (LuaValue, LuaValue) = pair?;
environment_with_globals.set(key, value)?;
}
chunk = chunk.set_environment(environment_with_globals);
chunk = chunk.set_environment(environment);
}
chunk.into_function()
// Enable JIT if codegen is enabled and the environment hasn't
// changed, otherwise disable JIT since it'll fall back anyways
lua.enable_jit(options.codegen_enabled && !env_changed);
let function = chunk.into_function()?;
lua.enable_jit(true);
Ok(function)
}

View file

@ -79,13 +79,11 @@ impl<'lua> FromLua<'lua> for LuauCompileOptions {
}
}
/**
Options for loading Lua source code.
*/
#[derive(Debug, Clone)]
pub struct LuauLoadOptions<'lua> {
pub(crate) debug_name: String,
pub(crate) environment: Option<LuaTable<'lua>>,
pub(crate) inject_globals: bool,
pub(crate) codegen_enabled: bool,
}
impl Default for LuauLoadOptions<'_> {
@ -93,6 +91,8 @@ impl Default for LuauLoadOptions<'_> {
Self {
debug_name: DEFAULT_DEBUG_NAME.to_string(),
environment: None,
inject_globals: true,
codegen_enabled: false,
}
}
}
@ -112,11 +112,21 @@ impl<'lua> FromLua<'lua> for LuauLoadOptions<'lua> {
options.environment = Some(environment);
}
if let Some(inject_globals) = t.get("injectGlobals")? {
options.inject_globals = inject_globals;
}
if let Some(codegen_enabled) = t.get("codegenEnabled")? {
options.codegen_enabled = codegen_enabled;
}
options
}
LuaValue::String(s) => Self {
debug_name: s.to_string_lossy().to_string(),
environment: None,
inject_globals: true,
codegen_enabled: false,
},
_ => {
return Err(LuaError::FromLuaConversionError {

View file

@ -14,7 +14,7 @@ workspace = true
[dependencies]
mlua = { version = "0.9.7", features = ["luau"] }
mlua-luau-scheduler = "0.0.2"
mlua-luau-scheduler = { version = "0.0.2", path = "../mlua-luau-scheduler" }
bstr = "1.9"
futures-util = "0.3"

View file

@ -1,6 +1,6 @@
[package]
name = "lune-std-process"
version = "0.1.0"
version = "0.1.1"
edition = "2021"
license = "MPL-2.0"
repository = "https://github.com/lune-org/lune"
@ -14,7 +14,7 @@ workspace = true
[dependencies]
mlua = { version = "0.9.7", features = ["luau"] }
mlua-luau-scheduler = "0.0.2"
mlua-luau-scheduler = { version = "0.0.2", path = "../mlua-luau-scheduler" }
directories = "5.0"
pin-project = "1.0"

View file

@ -145,10 +145,7 @@ async fn process_spawn(
lua: &Lua,
(program, args, options): (String, Option<Vec<String>>, ProcessSpawnOptions),
) -> LuaResult<LuaTable> {
let res = lua
.spawn(spawn_command(program, args, options))
.await
.expect("Failed to receive result of spawned process");
let res = lua.spawn(spawn_command(program, args, options)).await?;
/*
NOTE: If an exit code was not given by the child process,

View file

@ -81,7 +81,7 @@ impl LuaUserData for LuaCaptures {
methods.add_meta_method(LuaMetaMethod::Len, |_, this, ()| Ok(this.num_captures()));
methods.add_meta_method(LuaMetaMethod::ToString, |_, this, ()| {
Ok(format!("RegexCaptures({})", this.num_captures()))
Ok(format!("{}", this.num_captures()))
});
}

View file

@ -47,7 +47,7 @@ impl LuaUserData for LuaMatch {
fn add_methods<'lua, M: LuaUserDataMethods<'lua, Self>>(methods: &mut M) {
methods.add_meta_method(LuaMetaMethod::Len, |_, this, ()| Ok(this.range().len()));
methods.add_meta_method(LuaMetaMethod::ToString, |_, this, ()| {
Ok(format!("RegexMatch({})", this.slice()))
Ok(this.slice().to_string())
});
}
}

View file

@ -66,7 +66,7 @@ impl LuaUserData for LuaRegex {
);
methods.add_meta_method(LuaMetaMethod::ToString, |_, this, ()| {
Ok(format!("Regex({})", this.inner.as_str()))
Ok(this.inner.as_str().to_string())
});
}

View file

@ -1,6 +1,6 @@
[package]
name = "lune-std-roblox"
version = "0.1.0"
version = "0.1.1"
edition = "2021"
license = "MPL-2.0"
repository = "https://github.com/lune-org/lune"
@ -14,10 +14,10 @@ workspace = true
[dependencies]
mlua = { version = "0.9.7", features = ["luau"] }
mlua-luau-scheduler = "0.0.2"
mlua-luau-scheduler = { version = "0.0.2", path = "../mlua-luau-scheduler" }
once_cell = "1.17"
rbx_cookie = { version = "0.1.4", default-features = false }
lune-utils = { version = "0.1.0", path = "../lune-utils" }
lune-roblox = { version = "0.1.0", path = "../lune-roblox" }
lune-roblox = { version = "0.1.1", path = "../lune-roblox" }

View file

@ -29,6 +29,16 @@ serde_json = { version = "1.0", features = ["preserve_order"] }
serde_yaml = "0.9"
toml = { version = "0.8", features = ["preserve_order"] }
digest = "0.10.7"
hmac = "0.12.1"
md-5 = "0.10.6"
sha1 = "0.10.6"
sha2 = "0.10.8"
sha3 = "0.10.8"
# This feature MIGHT break due to the unstable nature of the digest crate.
# Check before updating it.
blake3 = { version = "1.5.0", features = ["traits-preview"] }
tokio = { version = "1", default-features = false, features = [
"rt",
"io-util",

View file

@ -0,0 +1,234 @@
use std::fmt::Write;
use bstr::BString;
use md5::Md5;
use mlua::prelude::*;
use blake3::Hasher as Blake3;
use sha1::Sha1;
use sha2::{Sha224, Sha256, Sha384, Sha512};
use sha3::{Sha3_224, Sha3_256, Sha3_384, Sha3_512};
pub struct HashOptions {
algorithm: HashAlgorithm,
message: BString,
secret: Option<BString>,
// seed: Option<BString>,
}
#[derive(Debug, Clone, Copy)]
enum HashAlgorithm {
Md5,
Sha1,
// SHA-2 variants
Sha2_224,
Sha2_256,
Sha2_384,
Sha2_512,
// SHA-3 variants
Sha3_224,
Sha3_256,
Sha3_384,
Sha3_512,
// Blake3
Blake3,
}
impl HashAlgorithm {
pub fn list_all_as_string() -> String {
[
"md5", "sha1", "sha224", "sha256", "sha384", "sha512", "sha3-224", "sha3-256",
"sha3-384", "sha3-512", "blake3",
]
.join(", ")
}
}
impl HashOptions {
/**
Computes the hash for the `message` using whatever `algorithm` is
contained within this struct and returns it as a string of hex digits.
*/
#[inline]
#[must_use = "hashing a message is useless without using the resulting hash"]
pub fn hash(self) -> String {
use digest::Digest;
let message = self.message;
let bytes = match self.algorithm {
HashAlgorithm::Md5 => Md5::digest(message).to_vec(),
HashAlgorithm::Sha1 => Sha1::digest(message).to_vec(),
HashAlgorithm::Sha2_224 => Sha224::digest(message).to_vec(),
HashAlgorithm::Sha2_256 => Sha256::digest(message).to_vec(),
HashAlgorithm::Sha2_384 => Sha384::digest(message).to_vec(),
HashAlgorithm::Sha2_512 => Sha512::digest(message).to_vec(),
HashAlgorithm::Sha3_224 => Sha3_224::digest(message).to_vec(),
HashAlgorithm::Sha3_256 => Sha3_256::digest(message).to_vec(),
HashAlgorithm::Sha3_384 => Sha3_384::digest(message).to_vec(),
HashAlgorithm::Sha3_512 => Sha3_512::digest(message).to_vec(),
HashAlgorithm::Blake3 => Blake3::digest(message).to_vec(),
};
// We don't want to return raw binary data generally, since that's not
// what most people want a hash for. So we have to make a hex string.
bytes
.iter()
.fold(String::with_capacity(bytes.len() * 2), |mut output, b| {
let _ = write!(output, "{b:02x}");
output
})
}
/**
Computes the HMAC for the `message` using whatever `algorithm` and
`secret` are contained within this struct. The computed value is
returned as a string of hex digits.
# Errors
If the `secret` is not provided or is otherwise invalid.
*/
#[inline]
pub fn hmac(self) -> LuaResult<String> {
use hmac::{Hmac, Mac, SimpleHmac};
let secret = self
.secret
.ok_or_else(|| LuaError::FromLuaConversionError {
from: "nil",
to: "string or buffer",
message: Some("Argument #3 missing or nil".to_string()),
})?;
/*
These macros exist to remove what would ultimately be dozens of
repeating lines. Essentially, there are several steps to processing
HMACs, which expand into the three lines you see below. However,
the Hmac struct is specialized towards eager block-based processing.
In order to support anything else, like BLAKE3, there is a second
type named `SimpleHmac`. This results in the duplicated macros below.
*/
macro_rules! hmac {
($Type:ty) => {{
let mut mac: Hmac<$Type> = Hmac::new_from_slice(&secret).into_lua_err()?;
mac.update(&self.message);
mac.finalize().into_bytes().to_vec()
}};
}
macro_rules! hmac_no_blocks {
($Type:ty) => {{
let mut mac: SimpleHmac<$Type> =
SimpleHmac::new_from_slice(&secret).into_lua_err()?;
mac.update(&self.message);
mac.finalize().into_bytes().to_vec()
}};
}
let bytes = match self.algorithm {
HashAlgorithm::Md5 => hmac!(Md5),
HashAlgorithm::Sha1 => hmac!(Sha1),
HashAlgorithm::Sha2_224 => hmac!(Sha224),
HashAlgorithm::Sha2_256 => hmac!(Sha256),
HashAlgorithm::Sha2_384 => hmac!(Sha384),
HashAlgorithm::Sha2_512 => hmac!(Sha512),
HashAlgorithm::Sha3_224 => hmac!(Sha3_224),
HashAlgorithm::Sha3_256 => hmac!(Sha3_256),
HashAlgorithm::Sha3_384 => hmac!(Sha3_384),
HashAlgorithm::Sha3_512 => hmac!(Sha3_512),
HashAlgorithm::Blake3 => hmac_no_blocks!(Blake3),
};
Ok(bytes
.iter()
.fold(String::with_capacity(bytes.len() * 2), |mut output, b| {
let _ = write!(output, "{b:02x}");
output
}))
}
}
impl<'lua> FromLua<'lua> for HashAlgorithm {
fn from_lua(value: LuaValue<'lua>, _lua: &'lua Lua) -> LuaResult<Self> {
if let LuaValue::String(str) = value {
/*
Casing tends to vary for algorithms, so rather than force
people to remember it we'll just accept any casing.
*/
let str = str.to_str()?.to_ascii_lowercase();
match str.as_str() {
"md5" => Ok(Self::Md5),
"sha1" => Ok(Self::Sha1),
"sha224" => Ok(Self::Sha2_224),
"sha256" => Ok(Self::Sha2_256),
"sha384" => Ok(Self::Sha2_384),
"sha512" => Ok(Self::Sha2_512),
"sha3-224" => Ok(Self::Sha3_224),
"sha3-256" => Ok(Self::Sha3_256),
"sha3-384" => Ok(Self::Sha3_384),
"sha3-512" => Ok(Self::Sha3_512),
"blake3" => Ok(Self::Blake3),
_ => Err(LuaError::FromLuaConversionError {
from: "string",
to: "HashAlgorithm",
message: Some(format!(
"Invalid hashing algorithm '{str}', valid kinds are:\n{}",
HashAlgorithm::list_all_as_string()
)),
}),
}
} else {
Err(LuaError::FromLuaConversionError {
from: value.type_name(),
to: "HashAlgorithm",
message: None,
})
}
}
}
impl<'lua> FromLuaMulti<'lua> for HashOptions {
fn from_lua_multi(mut values: LuaMultiValue<'lua>, lua: &'lua Lua) -> LuaResult<Self> {
let algorithm = values
.pop_front()
.map(|value| HashAlgorithm::from_lua(value, lua))
.transpose()?
.ok_or_else(|| LuaError::FromLuaConversionError {
from: "nil",
to: "HashAlgorithm",
message: Some("Argument #1 missing or nil".to_string()),
})?;
let message = values
.pop_front()
.map(|value| BString::from_lua(value, lua))
.transpose()?
.ok_or_else(|| LuaError::FromLuaConversionError {
from: "nil",
to: "string or buffer",
message: Some("Argument #2 missing or nil".to_string()),
})?;
let secret = values
.pop_front()
.map(|value| BString::from_lua(value, lua))
.transpose()?;
// let seed = values
// .pop_front()
// .map(|value| BString::from_lua(value, lua))
// .transpose()?;
Ok(HashOptions {
algorithm,
message,
secret,
// seed,
})
}
}

View file

@ -7,9 +7,11 @@ use lune_utils::TableBuilder;
mod compress_decompress;
mod encode_decode;
mod hash;
pub use self::compress_decompress::{compress, decompress, CompressDecompressFormat};
pub use self::encode_decode::{decode, encode, EncodeDecodeConfig, EncodeDecodeFormat};
pub use self::hash::HashOptions;
/**
Creates the `serde` standard library module.
@ -24,6 +26,8 @@ pub fn module(lua: &Lua) -> LuaResult<LuaTable> {
.with_function("decode", serde_decode)?
.with_async_function("compress", serde_compress)?
.with_async_function("decompress", serde_decompress)?
.with_function("hash", hash_message)?
.with_function("hmac", hmac_message)?
.build_readonly()
}
@ -55,3 +59,11 @@ async fn serde_decompress(
let bytes = decompress(bs, format).await?;
lua.create_string(bytes)
}
fn hash_message(lua: &Lua, options: HashOptions) -> LuaResult<LuaString> {
lua.create_string(options.hash())
}
fn hmac_message(lua: &Lua, options: HashOptions) -> LuaResult<LuaString> {
lua.create_string(options.hmac()?)
}

View file

@ -15,7 +15,7 @@ workspace = true
[dependencies]
dialoguer = "0.11"
mlua = { version = "0.9.7", features = ["luau"] }
mlua-luau-scheduler = "0.0.2"
mlua-luau-scheduler = { version = "0.0.2", path = "../mlua-luau-scheduler" }
tokio = { version = "1", default-features = false, features = [
"io-std",

View file

@ -14,7 +14,7 @@ workspace = true
[dependencies]
mlua = { version = "0.9.7", features = ["luau"] }
mlua-luau-scheduler = "0.0.2"
mlua-luau-scheduler = { version = "0.0.2", path = "../mlua-luau-scheduler" }
tokio = { version = "1", default-features = false, features = ["time"] }

View file

@ -33,12 +33,6 @@ pub fn module(lua: &Lua) -> LuaResult<LuaTable> {
.set_environment(task_delay_env)
.into_function()?;
// Overwrite resume & wrap functions on the coroutine global
// with ones that are compatible with our scheduler
let co = lua.globals().get::<_, LuaTable>("coroutine")?;
co.set("resume", fns.resume.clone())?;
co.set("wrap", fns.wrap.clone())?;
TableBuilder::new(lua)?
.with_value("cancel", fns.cancel)?
.with_value("defer", fns.defer)?

View file

@ -1,6 +1,6 @@
[package]
name = "lune-std"
version = "0.1.1"
version = "0.1.2"
edition = "2021"
license = "MPL-2.0"
repository = "https://github.com/lune-org/lune"
@ -39,21 +39,21 @@ task = ["dep:lune-std-task"]
[dependencies]
mlua = { version = "0.9.7", features = ["luau"] }
mlua-luau-scheduler = "0.0.2"
mlua-luau-scheduler = { version = "0.0.2", path = "../mlua-luau-scheduler" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tokio = { version = "1", default-features = false, features = ["fs", "sync"] }
lune-utils = { version = "0.1.0", path = "../lune-utils" }
lune-utils = { version = "0.1.1", path = "../lune-utils" }
lune-std-datetime = { optional = true, version = "0.1.1", path = "../lune-std-datetime" }
lune-std-fs = { optional = true, version = "0.1.0", path = "../lune-std-fs" }
lune-std-luau = { optional = true, version = "0.1.0", path = "../lune-std-luau" }
lune-std-net = { optional = true, version = "0.1.0", path = "../lune-std-net" }
lune-std-process = { optional = true, version = "0.1.0", path = "../lune-std-process" }
lune-std-process = { optional = true, version = "0.1.1", path = "../lune-std-process" }
lune-std-regex = { optional = true, version = "0.1.0", path = "../lune-std-regex" }
lune-std-roblox = { optional = true, version = "0.1.0", path = "../lune-std-roblox" }
lune-std-roblox = { optional = true, version = "0.1.1", path = "../lune-std-roblox" }
lune-std-serde = { optional = true, version = "0.1.0", path = "../lune-std-serde" }
lune-std-stdio = { optional = true, version = "0.1.0", path = "../lune-std-stdio" }
lune-std-task = { optional = true, version = "0.1.0", path = "../lune-std-task" }

View file

@ -1,6 +1,6 @@
[package]
name = "lune-utils"
version = "0.1.0"
version = "0.1.1"
edition = "2021"
license = "MPL-2.0"
repository = "https://github.com/lune-org/lune"

View file

@ -1,7 +1,12 @@
use mlua::prelude::*;
use crate::fmt::ErrorComponents;
use super::{
metamethods::{call_table_tostring_metamethod, call_userdata_tostring_metamethod},
metamethods::{
call_table_tostring_metamethod, call_userdata_tostring_metamethod,
get_table_type_metavalue, get_userdata_type_metavalue,
},
style::{COLOR_CYAN, COLOR_GREEN, COLOR_MAGENTA, COLOR_YELLOW},
};
@ -56,19 +61,39 @@ pub(crate) fn format_value_styled(value: &LuaValue, prefer_plain: bool) -> Strin
LuaValue::Function(_) => COLOR_MAGENTA.apply_to("<function>").to_string(),
LuaValue::LightUserData(_) => COLOR_MAGENTA.apply_to("<pointer>").to_string(),
LuaValue::UserData(u) => {
if let Some(s) = call_userdata_tostring_metamethod(u) {
s
} else {
COLOR_MAGENTA.apply_to("<userdata>").to_string()
}
let formatted = format_typename_and_tostringed(
"userdata",
get_userdata_type_metavalue(u),
call_userdata_tostring_metamethod(u),
);
COLOR_MAGENTA.apply_to(formatted).to_string()
}
LuaValue::Table(t) => {
if let Some(s) = call_table_tostring_metamethod(t) {
s
} else {
COLOR_MAGENTA.apply_to("<table>").to_string()
}
let formatted = format_typename_and_tostringed(
"table",
get_table_type_metavalue(t),
call_table_tostring_metamethod(t),
);
COLOR_MAGENTA.apply_to(formatted).to_string()
}
_ => COLOR_MAGENTA.apply_to("<?>").to_string(),
LuaValue::Error(e) => COLOR_MAGENTA
.apply_to(format!(
"<LuaError(\n{})>",
ErrorComponents::from(e.clone())
))
.to_string(),
}
}
fn format_typename_and_tostringed(
fallback: &'static str,
typename: Option<String>,
tostringed: Option<String>,
) -> String {
match (typename, tostringed) {
(Some(typename), Some(tostringed)) => format!("<{typename}({tostringed})>"),
(Some(typename), None) => format!("<{typename}>"),
(None, Some(tostringed)) => format!("<{tostringed}>"),
(None, None) => format!("<{fallback}>"),
}
}

View file

@ -1,29 +1,37 @@
use mlua::prelude::*;
pub fn get_table_type_metavalue<'a>(tab: &'a LuaTable<'a>) -> Option<String> {
let s = tab
.get_metatable()?
.get::<_, LuaString>(LuaMetaMethod::Type.name())
.ok()?;
let s = s.to_str().ok()?;
Some(s.to_string())
}
pub fn get_userdata_type_metavalue<'a>(tab: &'a LuaAnyUserData<'a>) -> Option<String> {
let s = tab
.get_metatable()
.ok()?
.get::<LuaString>(LuaMetaMethod::Type.name())
.ok()?;
let s = s.to_str().ok()?;
Some(s.to_string())
}
pub fn call_table_tostring_metamethod<'a>(tab: &'a LuaTable<'a>) -> Option<String> {
let f = match tab.get_metatable() {
None => None,
Some(meta) => match meta.get::<_, LuaFunction>(LuaMetaMethod::ToString.name()) {
Ok(method) => Some(method),
Err(_) => None,
},
}?;
match f.call::<_, String>(()) {
Ok(res) => Some(res),
Err(_) => None,
}
tab.get_metatable()?
.get::<_, LuaFunction>(LuaMetaMethod::ToString.name())
.ok()?
.call(tab)
.ok()
}
pub fn call_userdata_tostring_metamethod<'a>(tab: &'a LuaAnyUserData<'a>) -> Option<String> {
let f = match tab.get_metatable() {
Err(_) => None,
Ok(meta) => match meta.get::<LuaFunction>(LuaMetaMethod::ToString.name()) {
Ok(method) => Some(method),
Err(_) => None,
},
}?;
match f.call::<_, String>(()) {
Ok(res) => Some(res),
Err(_) => None,
}
tab.get_metatable()
.ok()?
.get::<LuaFunction>(LuaMetaMethod::ToString.name())
.ok()?
.call(tab)
.ok()
}

View file

@ -1,3 +1,4 @@
use std::cmp::Ordering;
use std::collections::HashSet;
use std::fmt::{self, Write as _};
@ -50,35 +51,40 @@ pub(crate) fn format_value_recursive(
} else if !visited.insert(LuaValueId::from(t)) {
write!(buffer, "{}", STYLE_DIM.apply_to("{ recursive }"))?;
} else {
writeln!(buffer, "{}", STYLE_DIM.apply_to("{"))?;
write!(buffer, "{}", STYLE_DIM.apply_to("{"))?;
for res in t.clone().pairs::<LuaValue, LuaValue>() {
let (key, value) = res.expect("conversion to LuaValue should never fail");
let formatted = if let Some(plain_key) = lua_value_as_plain_string_key(&key) {
format!(
"{}{plain_key} {} {}{}",
INDENT.repeat(1 + depth),
STYLE_DIM.apply_to("="),
format_value_recursive(&value, config, visited, depth + 1)?,
STYLE_DIM.apply_to(","),
)
} else {
format!(
"{}{}{}{} {} {}{}",
INDENT.repeat(1 + depth),
STYLE_DIM.apply_to("["),
format_value_recursive(&key, config, visited, depth + 1)?,
STYLE_DIM.apply_to("]"),
STYLE_DIM.apply_to("="),
format_value_recursive(&value, config, visited, depth + 1)?,
STYLE_DIM.apply_to(","),
)
};
buffer.push_str(&formatted);
}
let mut values = t
.clone()
.pairs::<LuaValue, LuaValue>()
.map(|res| res.expect("conversion to LuaValue should never fail"))
.collect::<Vec<_>>();
sort_for_formatting(&mut values);
let is_empty = values.is_empty();
let is_array = values
.iter()
.enumerate()
.all(|(i, (key, _))| key.as_integer().is_some_and(|x| x == (i as i32) + 1));
let formatted_values = if is_array {
format_array(values, config, visited, depth)?
} else {
format_table(values, config, visited, depth)?
};
visited.remove(&LuaValueId::from(t));
write!(buffer, "\n{}", STYLE_DIM.apply_to("}"))?;
if is_empty {
write!(buffer, " {}", STYLE_DIM.apply_to("}"))?;
} else {
write!(
buffer,
"\n{}\n{}{}",
formatted_values.join("\n"),
INDENT.repeat(depth),
STYLE_DIM.apply_to("}")
)?;
}
}
} else {
let prefer_plain = depth == 0;
@ -87,3 +93,74 @@ pub(crate) fn format_value_recursive(
Ok(buffer)
}
fn sort_for_formatting(values: &mut [(LuaValue, LuaValue)]) {
values.sort_by(|(a, _), (b, _)| {
if a.type_name() == b.type_name() {
// If we have the same type, sort either numerically or alphabetically
match (a, b) {
(LuaValue::Integer(a), LuaValue::Integer(b)) => a.cmp(b),
(LuaValue::Number(a), LuaValue::Number(b)) => a.partial_cmp(b).unwrap(),
(LuaValue::String(a), LuaValue::String(b)) => a.to_str().ok().cmp(&b.to_str().ok()),
_ => Ordering::Equal,
}
} else {
// If we have different types, sort numbers first, then strings, then others
a.is_number()
.cmp(&b.is_number())
.then_with(|| a.is_string().cmp(&b.is_string()))
}
});
}
fn format_array(
values: Vec<(LuaValue, LuaValue)>,
config: &ValueFormatConfig,
visited: &mut HashSet<LuaValueId>,
depth: usize,
) -> Result<Vec<String>, fmt::Error> {
values
.into_iter()
.map(|(_, value)| {
Ok(format!(
"{}{}{}",
INDENT.repeat(1 + depth),
format_value_recursive(&value, config, visited, depth + 1)?,
STYLE_DIM.apply_to(","),
))
})
.collect()
}
fn format_table(
values: Vec<(LuaValue, LuaValue)>,
config: &ValueFormatConfig,
visited: &mut HashSet<LuaValueId>,
depth: usize,
) -> Result<Vec<String>, fmt::Error> {
values
.into_iter()
.map(|(key, value)| {
if let Some(plain_key) = lua_value_as_plain_string_key(&key) {
Ok(format!(
"{}{plain_key} {} {}{}",
INDENT.repeat(1 + depth),
STYLE_DIM.apply_to("="),
format_value_recursive(&value, config, visited, depth + 1)?,
STYLE_DIM.apply_to(","),
))
} else {
Ok(format!(
"{}{}{}{} {} {}{}",
INDENT.repeat(1 + depth),
STYLE_DIM.apply_to("["),
format_value_recursive(&key, config, visited, depth + 1)?,
STYLE_DIM.apply_to("]"),
STYLE_DIM.apply_to("="),
format_value_recursive(&value, config, visited, depth + 1)?,
STYLE_DIM.apply_to(","),
))
}
})
.collect()
}

View file

@ -1,6 +1,6 @@
[package]
name = "lune"
version = "0.8.4"
version = "0.8.5"
edition = "2021"
license = "MPL-2.0"
repository = "https://github.com/lune-org/lune"
@ -51,7 +51,7 @@ workspace = true
[dependencies]
mlua = { version = "0.9.7", features = ["luau"] }
mlua-luau-scheduler = { git = "https://github.com/0x5eal/mlua-luau-scheduler-exitstatus.git" }
mlua-luau-scheduler = { version = "0.0.2", path = "../mlua-luau-scheduler" }
anyhow = "1.0"
console = "0.15"
@ -59,6 +59,7 @@ dialoguer = "0.11"
directories = "5.0"
futures-util = "0.3"
once_cell = "1.17"
self_cell = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0"
@ -70,9 +71,9 @@ reqwest = { version = "0.11", default-features = false, features = [
"rustls-tls",
] }
lune-std = { optional = true, version = "0.1.1", path = "../lune-std" }
lune-roblox = { optional = true, version = "0.1.0", path = "../lune-roblox" }
lune-utils = { version = "0.1.0", path = "../lune-utils" }
lune-std = { optional = true, version = "0.1.2", path = "../lune-std" }
lune-roblox = { optional = true, version = "0.1.1", path = "../lune-roblox" }
lune-utils = { version = "0.1.1", path = "../lune-utils" }
### CLI

View file

@ -8,15 +8,97 @@ use std::{
},
};
use mlua::{IntoLuaMulti as _, Lua, Value};
use mlua_luau_scheduler::Scheduler;
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};
use self_cell::self_cell;
use super::{RuntimeError, RuntimeResult};
#[derive(Debug)]
// NOTE: We need to use self_cell to create a self-referential
// struct storing both the Lua VM and the scheduler. The scheduler
// needs to be created at the same time so that we can also create
// and inject the scheduler functions which will be used across runs.
self_cell! {
struct RuntimeInner {
owner: Rc<Lua>,
#[covariant]
dependent: Scheduler,
}
}
impl RuntimeInner {
fn create() -> LuaResult<Self> {
let lua = Rc::new(Lua::new());
lua.set_app_data(Rc::downgrade(&lua));
lua.set_app_data(Vec::<String>::new());
Self::try_new(lua, |lua| {
let sched = Scheduler::new(lua);
let fns = Functions::new(lua)?;
// Overwrite some globals that are not compatible with our scheduler
let co = lua.globals().get::<_, LuaTable>("coroutine")?;
co.set("resume", fns.resume.clone())?;
co.set("wrap", fns.wrap.clone())?;
// Inject all the globals that are enabled
#[cfg(any(
feature = "std-datetime",
feature = "std-fs",
feature = "std-luau",
feature = "std-net",
feature = "std-process",
feature = "std-regex",
feature = "std-roblox",
feature = "std-serde",
feature = "std-stdio",
feature = "std-task",
))]
{
lune_std::inject_globals(lua)?;
}
// Sandbox the Luau VM and make it go zooooooooom
lua.sandbox(true)?;
// _G table needs to be injected again after sandboxing,
// otherwise it will be read-only and completely unusable
#[cfg(any(
feature = "std-datetime",
feature = "std-fs",
feature = "std-luau",
feature = "std-net",
feature = "std-process",
feature = "std-regex",
feature = "std-roblox",
feature = "std-serde",
feature = "std-stdio",
feature = "std-task",
))]
{
let g_table = lune_std::LuneStandardGlobal::GTable;
lua.globals().set(g_table.name(), g_table.create(lua)?)?;
}
Ok(sched)
})
}
fn lua(&self) -> &Lua {
self.borrow_owner()
}
fn scheduler(&self) -> &Scheduler {
self.borrow_dependent()
}
}
/**
A Lune runtime.
*/
pub struct Runtime {
lua: Rc<Lua>,
args: Vec<String>,
inner: RuntimeInner,
}
impl Runtime {
@ -28,30 +110,8 @@ impl Runtime {
#[must_use]
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
let lua = Rc::new(Lua::new());
lua.set_app_data(Rc::downgrade(&lua));
lua.set_app_data(Vec::<String>::new());
#[cfg(any(
feature = "std-datetime",
feature = "std-fs",
feature = "std-luau",
feature = "std-net",
feature = "std-process",
feature = "std-regex",
feature = "std-roblox",
feature = "std-serde",
feature = "std-stdio",
feature = "std-task",
))]
{
lune_std::inject_globals(&lua).expect("Failed to inject globals");
}
Self {
lua,
args: Vec::new(),
inner: RuntimeInner::create().expect("Failed to create runtime"),
}
}
@ -59,12 +119,13 @@ impl Runtime {
Sets arguments to give in `process.args` for Lune scripts.
*/
#[must_use]
pub fn with_args<V>(mut self, args: V) -> Self
pub fn with_args<A, S>(self, args: A) -> Self
where
V: Into<Vec<String>>,
A: IntoIterator<Item = S>,
S: Into<String>,
{
self.args = args.into();
self.lua.set_app_data(self.args.clone());
let args = args.into_iter().map(Into::into).collect::<Vec<_>>();
self.inner.lua().set_app_data(args);
self
}
@ -81,21 +142,21 @@ impl Runtime {
&mut self,
script_name: impl AsRef<str>,
script_contents: impl AsRef<[u8]>,
) -> RuntimeResult<(u8, Vec<Value>)> {
) -> RuntimeResult<(u8, Vec<LuaValue>)> {
// Create a new scheduler for this run
let sched = Scheduler::new(&self.lua);
let lua = self.inner.lua();
let sched = self.inner.scheduler();
// Add error callback to format errors nicely + store status
let got_any_error = Arc::new(AtomicBool::new(false));
let got_any_inner = Arc::clone(&got_any_error);
sched.set_error_callback(move |e| {
self.inner.scheduler().set_error_callback(move |e| {
got_any_inner.store(true, Ordering::SeqCst);
eprintln!("{}", RuntimeError::from(e));
});
// Load our "main" thread
let main = self
.lua
let main = lua
.load(script_contents.as_ref())
.set_name(script_name.as_ref());
@ -105,7 +166,7 @@ impl Runtime {
let thread_res = match sched.get_thread_result(main_thread_id) {
Some(res) => res,
None => Value::Nil.into_lua_multi(&self.lua),
None => LuaValue::Nil.into_lua_multi(lua),
}?
.into_vec();

View file

@ -113,6 +113,7 @@ create_tests! {
luau_compile: "luau/compile",
luau_load: "luau/load",
luau_options: "luau/options",
luau_safeenv: "luau/safeenv",
}
#[cfg(feature = "std-net")]
@ -140,6 +141,7 @@ create_tests! {
process_spawn_async: "process/spawn/async",
process_spawn_basic: "process/spawn/basic",
process_spawn_cwd: "process/spawn/cwd",
process_spawn_no_panic: "process/spawn/no_panic",
process_spawn_shell: "process/spawn/shell",
process_spawn_stdin: "process/spawn/stdin",
process_spawn_stdio: "process/spawn/stdio",
@ -229,6 +231,8 @@ create_tests! {
serde_json_encode: "serde/json/encode",
serde_toml_decode: "serde/toml/decode",
serde_toml_encode: "serde/toml/encode",
serde_hashing_hash: "serde/hashing/hash",
serde_hashing_hmac: "serde/hashing/hmac",
}
#[cfg(feature = "std-stdio")]

View file

@ -0,0 +1,67 @@
[package]
name = "mlua-luau-scheduler"
version = "0.0.2"
edition = "2021"
license = "MPL-2.0"
repository = "https://github.com/lune-org/lune"
description = "Luau-based async scheduler, using mlua and async-executor"
readme = "README.md"
keywords = ["async", "luau", "scheduler"]
categories = ["async"]
[lib]
path = "src/lib.rs"
[lints]
workspace = true
[dependencies]
async-executor = "1.8"
blocking = "1.5"
concurrent-queue = "2.4"
derive_more = "0.99"
event-listener = "4.0"
futures-lite = "2.2"
rustc-hash = "1.1"
tracing = "0.1"
mlua = { version = "0.9.6", features = [
"luau",
"luau-jit",
"async",
"serialize",
] }
[dev-dependencies]
async-fs = "2.1"
async-io = "2.3"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tracing-tracy = "0.11"
[[example]]
name = "basic_sleep"
test = true
[[example]]
name = "basic_spawn"
test = true
[[example]]
name = "callbacks"
test = true
[[example]]
name = "exit_code"
test = true
[[example]]
name = "lots_of_threads"
test = true
[[example]]
name = "scheduler_ordering"
test = true
[[example]]
name = "tracy"
test = false

View file

@ -0,0 +1,78 @@
<!-- markdownlint-disable MD033 -->
<!-- markdownlint-disable MD041 -->
# `mlua-luau-scheduler`
An async scheduler for Luau, using [`mlua`][mlua] and built on top of [`async-executor`][async-executor].
This crate is runtime-agnostic and is compatible with any async runtime, including [Tokio][tokio], [smol][smol], [async-std][async-std], and others. <br/>
However, since many dependencies are shared with [smol][smol], depending on it over other runtimes may be preferred.
[async-executor]: https://crates.io/crates/async-executor
[async-std]: https://async.rs
[mlua]: https://crates.io/crates/mlua
[smol]: https://github.com/smol-rs/smol
[tokio]: https://tokio.rs
## Example Usage
### 1. Import dependencies
```rs
use std::time::{Duration, Instant};
use std::io::ErrorKind;
use async_io::{block_on, Timer};
use async_fs::read_to_string;
use mlua::prelude::*;
use mlua_luau_scheduler::*;
```
### 2. Set up Lua environment
```rs
let lua = Lua::new();
lua.globals().set(
"sleep",
lua.create_async_function(|_, duration: f64| async move {
let before = Instant::now();
let after = Timer::after(Duration::from_secs_f64(duration)).await;
Ok((after - before).as_secs_f64())
})?,
)?;
lua.globals().set(
"readFile",
lua.create_async_function(|lua, path: String| async move {
// Spawn a background task that does not take up resources on the Lua thread
// Normally, futures in mlua cannot be shared across threads, but this one can
let task = lua.spawn(async move {
match read_to_string(path).await {
Ok(s) => Ok(Some(s)),
Err(e) if e.kind() == ErrorKind::NotFound => Ok(None),
Err(e) => Err(e),
}
});
task.await.into_lua_err()
})?,
)?;
```
### 3. Set up scheduler, run threads
```rs
let sched = Scheduler::new(&lua);
// We can create multiple Lua threads ...
let sleep_thread = lua.load("sleep(0.1)");
let file_thread = lua.load("readFile(\"Cargo.toml\")");
// ... spawn them both onto the scheduler ...
sched.push_thread_front(sleep_thread, ());
sched.push_thread_front(file_thread, ());
// ... and run until they finish
block_on(sched.run());
```

View file

@ -0,0 +1,45 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::cargo_common_metadata)]
use std::time::{Duration, Instant};
use async_io::{block_on, Timer};
use mlua::prelude::*;
use mlua_luau_scheduler::Scheduler;
const MAIN_SCRIPT: &str = include_str!("./lua/basic_sleep.luau");
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
lua.globals().set(
"sleep",
lua.create_async_function(|_, duration: f64| async move {
let before = Instant::now();
let after = Timer::after(Duration::from_secs_f64(duration)).await;
Ok((after - before).as_secs_f64())
})?,
)?;
// Load the main script into a scheduler
let sched = Scheduler::new(&lua);
let main = lua.load(MAIN_SCRIPT);
sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
Ok(())
}
#[test]
fn test_basic_sleep() -> LuaResult<()> {
main()
}

View file

@ -0,0 +1,64 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::cargo_common_metadata)]
use std::io::ErrorKind;
use async_fs::read_to_string;
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::{LuaSpawnExt, Scheduler};
const MAIN_SCRIPT: &str = include_str!("./lua/basic_spawn.luau");
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
lua.globals().set(
"readFile",
lua.create_async_function(|lua, path: String| async move {
// Spawn background task that does not take up resources on the Lua thread
let task = lua.spawn(async move {
match read_to_string(path).await {
Ok(s) => Ok(Some(s)),
Err(e) if e.kind() == ErrorKind::NotFound => Ok(None),
Err(e) => Err(e),
}
});
// Wait for it to complete
let result = task.await.into_lua_err();
// We can also spawn local tasks that do take up resources
// on the Lua thread, but that do not have the Send bound
if result.is_ok() {
lua.spawn_local(async move {
println!("File read successfully!");
});
}
result
})?,
)?;
// Load the main script into a scheduler
let sched = Scheduler::new(&lua);
let main = lua.load(MAIN_SCRIPT);
sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
Ok(())
}
#[test]
fn test_basic_spawn() -> LuaResult<()> {
main()
}

View file

@ -0,0 +1,48 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::missing_panics_doc)]
#![allow(clippy::cargo_common_metadata)]
use mlua::prelude::*;
use mlua_luau_scheduler::Scheduler;
use async_io::block_on;
const MAIN_SCRIPT: &str = include_str!("./lua/callbacks.luau");
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
// Create a new scheduler with custom callbacks
let sched = Scheduler::new(&lua);
sched.set_error_callback(|e| {
println!(
"Captured error from Lua!\n{}\n{e}\n{}",
"-".repeat(15),
"-".repeat(15)
);
});
// Load the main script into the scheduler, and keep track of the thread we spawn
let main = lua.load(MAIN_SCRIPT);
let id = sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
// We should have gotten the error back from our script
assert!(sched.get_thread_result(id).unwrap().is_err());
Ok(())
}
#[test]
fn test_callbacks() -> LuaResult<()> {
main()
}

View file

@ -0,0 +1,43 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::missing_panics_doc)]
#![allow(clippy::cargo_common_metadata)]
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};
const MAIN_SCRIPT: &str = include_str!("./lua/exit_code.luau");
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
let sched = Scheduler::new(&lua);
let fns = Functions::new(&lua)?;
lua.globals().set("exit", fns.exit)?;
// Load the main script into the scheduler
let main = lua.load(MAIN_SCRIPT);
sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
// Verify that we got a correct exit code
let code = sched.get_exit_code().unwrap_or_default();
assert_eq!(code, 1);
Ok(())
}
#[test]
fn test_exit_code() -> LuaResult<()> {
main()
}

View file

@ -0,0 +1,51 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::cargo_common_metadata)]
use std::time::Duration;
use async_io::{block_on, Timer};
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};
const MAIN_SCRIPT: &str = include_str!("./lua/lots_of_threads.luau");
const ONE_NANOSECOND: Duration = Duration::from_nanos(1);
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
let sched = Scheduler::new(&lua);
let fns = Functions::new(&lua)?;
lua.globals().set("spawn", fns.spawn)?;
lua.globals().set(
"sleep",
lua.create_async_function(|_, ()| async move {
// Obviously we can't sleep for a single nanosecond since
// this uses OS scheduling under the hood, but we can try
Timer::after(ONE_NANOSECOND).await;
Ok(())
})?,
)?;
// Load the main script into the scheduler
let main = lua.load(MAIN_SCRIPT);
sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
Ok(())
}
#[test]
fn test_lots_of_threads() -> LuaResult<()> {
main()
}

View file

@ -0,0 +1,13 @@
--!nocheck
--!nolint UnknownGlobal
print("Sleeping for 3 seconds...")
sleep(1)
print("1 second passed")
sleep(1)
print("2 seconds passed")
sleep(1)
print("3 seconds passed")

View file

@ -0,0 +1,17 @@
--!nocheck
--!nolint UnknownGlobal
local _, err = pcall(function()
local file = readFile("Cargo.toml")
if file ~= nil then
print("Cargo.toml found!")
print("Contents:")
print(file)
else
print("Cargo.toml not found!")
end
end)
if err ~= nil then
print("Error while reading file: " .. err)
end

View file

@ -0,0 +1,4 @@
--!nocheck
--!nolint UnknownGlobal
error("Oh no! Something went very very wrong!")

View file

@ -0,0 +1,8 @@
--!nocheck
--!nolint UnknownGlobal
print("Setting exit code manually")
exit(1)
error("unreachable")

View file

@ -0,0 +1,29 @@
--!nocheck
--!nolint UnknownGlobal
local NUM_BATCHES = 10
local NUM_THREADS = 100_000
print(`Spawning {NUM_BATCHES * NUM_THREADS} threads split into {NUM_BATCHES} batches\n`)
local before = os.clock()
for i = 1, NUM_BATCHES do
print(`Batch {i} of {NUM_BATCHES}`)
local thread = coroutine.running()
local counter = 0
for j = 1, NUM_THREADS do
spawn(function()
sleep(0.1)
counter += 1
if counter == NUM_THREADS then
spawn(thread)
end
end)
end
coroutine.yield()
end
local after = os.clock()
print(`\nSpawned {NUM_BATCHES * NUM_THREADS} sleeping threads in {after - before}s`)

View file

@ -0,0 +1,34 @@
--!nocheck
--!nolint UnknownGlobal
local nums = {}
local function insert(n: number)
table.insert(nums, n)
print(n)
end
insert(1)
-- Defer will run at the end of the resumption cycle, but without yielding
defer(function()
insert(5)
end)
-- Spawn will instantly run up until the first yield, and must then be resumed manually ...
spawn(function()
insert(2)
coroutine.yield()
error("unreachable code")
end)
-- ... unless calling functions created using `lua.create_async_function(...)`,
-- which will resume their calling thread with their result automatically
spawn(function()
insert(3)
sleep(1)
insert(6)
end)
insert(4)
return nums

View file

@ -0,0 +1,56 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::missing_panics_doc)]
#![allow(clippy::cargo_common_metadata)]
use std::time::{Duration, Instant};
use async_io::{block_on, Timer};
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};
const MAIN_SCRIPT: &str = include_str!("./lua/scheduler_ordering.luau");
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
let sched = Scheduler::new(&lua);
let fns = Functions::new(&lua)?;
lua.globals().set("spawn", fns.spawn)?;
lua.globals().set("defer", fns.defer)?;
lua.globals().set(
"sleep",
lua.create_async_function(|_, duration: Option<f64>| async move {
let duration = duration.unwrap_or_default().max(1.0 / 250.0);
let before = Instant::now();
let after = Timer::after(Duration::from_secs_f64(duration)).await;
Ok((after - before).as_secs_f64())
})?,
)?;
// Load the main script into the scheduler, and keep track of the thread we spawn
let main = lua.load(MAIN_SCRIPT);
let id = sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
// We should have gotten proper values back from our script
let res = sched.get_thread_result(id).unwrap().unwrap();
let nums = Vec::<usize>::from_lua_multi(res, &lua)?;
assert_eq!(nums, vec![1, 2, 3, 4, 5, 6]);
Ok(())
}
#[test]
fn test_scheduler_ordering() -> LuaResult<()> {
main()
}

View file

@ -0,0 +1,61 @@
/*
NOTE: This example is the same as "lots_of_threads", but with tracy set up for performance profiling.
How to run:
1. Install tracy
- Follow the instructions at https://github.com/wolfpld/tracy
- Or install via something like homebrew: `brew install tracy`
2. Run the server (`tracy`) in a terminal
3. Run the example in another terminal
- `export RUST_LOG=trace`
- `cargo run --example tracy`
*/
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::cargo_common_metadata)]
use std::time::Duration;
use async_io::{block_on, Timer};
use tracing_subscriber::layer::SubscriberExt;
use tracing_tracy::{client::Client as TracyClient, TracyLayer};
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};
const MAIN_SCRIPT: &str = include_str!("./lua/lots_of_threads.luau");
const ONE_NANOSECOND: Duration = Duration::from_nanos(1);
pub fn main() -> LuaResult<()> {
let _client = TracyClient::start();
let _ = tracing::subscriber::set_global_default(
tracing_subscriber::registry().with(TracyLayer::default()),
);
// Set up persistent Lua environment
let lua = Lua::new();
let sched = Scheduler::new(&lua);
let fns = Functions::new(&lua)?;
lua.globals().set("spawn", fns.spawn)?;
lua.globals().set(
"sleep",
lua.create_async_function(|_, ()| async move {
// Obviously we can't sleep for a single nanosecond since
// this uses OS scheduling under the hood, but we can try
Timer::after(ONE_NANOSECOND).await;
Ok(())
})?,
)?;
// Load the main script into the scheduler
let main = lua.load(MAIN_SCRIPT);
sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
Ok(())
}

View file

@ -0,0 +1,45 @@
use std::{cell::RefCell, rc::Rc};
use mlua::prelude::*;
type ErrorCallback = Box<dyn Fn(LuaError) + Send + 'static>;
#[derive(Clone)]
pub(crate) struct ThreadErrorCallback {
inner: Rc<RefCell<Option<ErrorCallback>>>,
}
impl ThreadErrorCallback {
pub fn new() -> Self {
Self {
inner: Rc::new(RefCell::new(None)),
}
}
pub fn replace(&self, callback: impl Fn(LuaError) + Send + 'static) {
self.inner.borrow_mut().replace(Box::new(callback));
}
pub fn clear(&self) {
self.inner.borrow_mut().take();
}
pub fn call(&self, error: &LuaError) {
if let Some(cb) = &*self.inner.borrow() {
cb(error.clone());
}
}
}
#[allow(clippy::needless_pass_by_value)]
fn default_error_callback(e: LuaError) {
eprintln!("{e}");
}
impl Default for ThreadErrorCallback {
fn default() -> Self {
let this = Self::new();
this.replace(default_error_callback);
this
}
}

View file

@ -0,0 +1,31 @@
use std::{cell::Cell, process::ExitCode, rc::Rc};
use event_listener::Event;
#[derive(Debug, Clone)]
pub(crate) struct Exit {
code: Rc<Cell<Option<u8>>>,
event: Rc<Event>,
}
impl Exit {
pub fn new() -> Self {
Self {
code: Rc::new(Cell::new(None)),
event: Rc::new(Event::new()),
}
}
pub fn set(&self, code: u8) {
self.code.set(Some(code));
self.event.notify(usize::MAX);
}
pub fn get(&self) -> Option<u8> {
self.code.get()
}
pub async fn listen(&self) {
self.event.listen().await;
}
}

View file

@ -0,0 +1,283 @@
#![allow(unused_imports)]
#![allow(clippy::too_many_lines)]
use std::process::{ExitCode, ExitStatus};
use mlua::prelude::*;
use crate::{
error_callback::ThreadErrorCallback,
queue::{DeferredThreadQueue, SpawnedThreadQueue},
result_map::ThreadResultMap,
scheduler::Scheduler,
thread_id::ThreadId,
traits::LuaSchedulerExt,
util::{is_poll_pending, LuaThreadOrFunction, ThreadResult},
};
const ERR_METADATA_NOT_ATTACHED: &str = "\
Lua state does not have scheduler metadata attached!\
\nThis is most likely caused by creating functions outside of a scheduler.\
\nScheduler functions must always be created from within an active scheduler.\
";
const EXIT_IMPL_LUA: &str = r"
exit(...)
yield()
";
const WRAP_IMPL_LUA: &str = r"
local t = create(...)
return function(...)
local r = { resume(t, ...) }
if r[1] then
return select(2, unpack(r))
else
error(r[2], 2)
end
end
";
/**
A collection of Lua functions that may be called to interact with a [`Scheduler`].
Note that these could all be implemented using [`LuaSchedulerExt`]; however, this struct
is implemented using internal (non-public) APIs and generally has better performance.
*/
pub struct Functions<'lua> {
/**
Implementation of `coroutine.resume` that handles async polling properly.
Defers onto the scheduler queue if the thread calls an async function.
*/
pub resume: LuaFunction<'lua>,
/**
Implementation of `coroutine.wrap` that handles async polling properly.
Defers onto the scheduler queue if the thread calls an async function.
*/
pub wrap: LuaFunction<'lua>,
/**
Resumes a function / thread once instantly, and runs until first yield.
Spawns onto the scheduler queue if not completed.
*/
pub spawn: LuaFunction<'lua>,
/**
Defers a function / thread onto the scheduler queue.
Does not resume instantly, only adds to the queue.
*/
pub defer: LuaFunction<'lua>,
/**
Cancels a function / thread, removing it from the queue.
*/
pub cancel: LuaFunction<'lua>,
/**
Exits the scheduler, stopping all other threads and closing the scheduler.
Yields the calling thread to ensure that it does not continue.
*/
pub exit: LuaFunction<'lua>,
}
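A rough Luau-side sketch of these semantics, assuming the functions are injected as globals named `spawn` and `cancel` alongside an async `sleep`, as the examples in this crate do:
```lua
--!nocheck
--!nolint UnknownGlobal
local thread = spawn(function()
	sleep(1)
	print("unreachable, the thread is cancelled below")
end)
cancel(thread)
```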
impl<'lua> Functions<'lua> {
/**
Creates a new collection of Lua functions that may be called to interact with a [`Scheduler`].
# Errors
Errors when out of memory, or if default Lua globals are missing.
# Panics
Panics when the given [`Lua`] instance does not have an attached [`Scheduler`].
*/
pub fn new(lua: &'lua Lua) -> LuaResult<Self> {
let spawn_queue = lua
.app_data_ref::<SpawnedThreadQueue>()
.expect(ERR_METADATA_NOT_ATTACHED)
.clone();
let defer_queue = lua
.app_data_ref::<DeferredThreadQueue>()
.expect(ERR_METADATA_NOT_ATTACHED)
.clone();
let error_callback = lua
.app_data_ref::<ThreadErrorCallback>()
.expect(ERR_METADATA_NOT_ATTACHED)
.clone();
let result_map = lua
.app_data_ref::<ThreadResultMap>()
.expect(ERR_METADATA_NOT_ATTACHED)
.clone();
let resume_queue = defer_queue.clone();
let resume_map = result_map.clone();
let resume =
lua.create_function(move |lua, (thread, args): (LuaThread, LuaMultiValue)| {
let _span = tracing::trace_span!("Scheduler::fn_resume").entered();
match thread.resume::<_, LuaMultiValue>(args.clone()) {
Ok(v) => {
if v.get(0).is_some_and(is_poll_pending) {
// Pending, defer to scheduler and return nil
resume_queue.push_item(lua, &thread, args)?;
(true, LuaValue::Nil).into_lua_multi(lua)
} else {
// Not pending, store the value if thread is done
if thread.status() != LuaThreadStatus::Resumable {
let id = ThreadId::from(&thread);
if resume_map.is_tracked(id) {
let res = ThreadResult::new(Ok(v.clone()), lua);
resume_map.insert(id, res);
}
}
(true, v).into_lua_multi(lua)
}
}
Err(e) => {
// Not pending, store the error
let id = ThreadId::from(&thread);
if resume_map.is_tracked(id) {
let res = ThreadResult::new(Err(e.clone()), lua);
resume_map.insert(id, res);
}
(false, e.to_string()).into_lua_multi(lua)
}
}
})?;
let wrap_env = lua.create_table_from(vec![
("resume", resume.clone()),
("error", lua.globals().get::<_, LuaFunction>("error")?),
("select", lua.globals().get::<_, LuaFunction>("select")?),
("unpack", lua.globals().get::<_, LuaFunction>("unpack")?),
(
"create",
lua.globals()
.get::<_, LuaTable>("coroutine")?
.get::<_, LuaFunction>("create")?,
),
])?;
let wrap = lua
.load(WRAP_IMPL_LUA)
.set_name("=__scheduler_wrap")
.set_environment(wrap_env)
.into_function()?;
let spawn_map = result_map.clone();
let spawn = lua.create_function(
move |lua, (tof, args): (LuaThreadOrFunction, LuaMultiValue)| {
let _span = tracing::trace_span!("Scheduler::fn_spawn").entered();
let thread = tof.into_thread(lua)?;
if thread.status() == LuaThreadStatus::Resumable {
// NOTE: We need to resume the thread once instantly for correct behavior,
// and only if we get the pending value back can we spawn it onto the async executor
match thread.resume::<_, LuaMultiValue>(args.clone()) {
Ok(v) => {
if v.get(0).is_some_and(is_poll_pending) {
spawn_queue.push_item(lua, &thread, args)?;
} else {
// Not pending, store the value if thread is done
if thread.status() != LuaThreadStatus::Resumable {
let id = ThreadId::from(&thread);
if spawn_map.is_tracked(id) {
let res = ThreadResult::new(Ok(v), lua);
spawn_map.insert(id, res);
}
}
}
}
Err(e) => {
error_callback.call(&e);
// Not pending, store the error
let id = ThreadId::from(&thread);
if spawn_map.is_tracked(id) {
let res = ThreadResult::new(Err(e), lua);
spawn_map.insert(id, res);
}
}
};
}
Ok(thread)
},
)?;
let defer = lua.create_function(
move |lua, (tof, args): (LuaThreadOrFunction, LuaMultiValue)| {
let _span = tracing::trace_span!("Scheduler::fn_defer").entered();
let thread = tof.into_thread(lua)?;
if thread.status() == LuaThreadStatus::Resumable {
defer_queue.push_item(lua, &thread, args)?;
}
Ok(thread)
},
)?;
let close = lua
.globals()
.get::<_, LuaTable>("coroutine")?
.get::<_, LuaFunction>("close")?;
let close_key = lua.create_registry_value(close)?;
let cancel = lua.create_function(move |lua, thread: LuaThread| {
let _span = tracing::trace_span!("Scheduler::fn_cancel").entered();
let close: LuaFunction = lua.registry_value(&close_key)?;
match close.call(thread) {
Err(LuaError::CoroutineInactive) | Ok(()) => Ok(()),
Err(e) => Err(e),
}
})?;
let exit_env = lua.create_table_from(vec![
(
"exit",
lua.create_function(|lua, code: Option<u8>| {
let _span = tracing::trace_span!("Scheduler::fn_exit").entered();
let code = code.unwrap_or_default();
lua.set_exit_code(code);
Ok(())
})?,
),
(
"yield",
lua.globals()
.get::<_, LuaTable>("coroutine")?
.get::<_, LuaFunction>("yield")?,
),
])?;
let exit = lua
.load(EXIT_IMPL_LUA)
.set_name("=__scheduler_exit")
.set_environment(exit_env)
.into_function()?;
Ok(Self {
resume,
wrap,
spawn,
defer,
cancel,
exit,
})
}
}
impl Functions<'_> {
/**
Injects [`Scheduler`]-compatible functions into the given [`Lua`] instance.
This will overwrite the following functions:
- `coroutine.resume`
- `coroutine.wrap`
# Errors
Errors when out of memory, or if default Lua globals are missing.
*/
pub fn inject_compat(&self, lua: &Lua) -> LuaResult<()> {
let co: LuaTable = lua.globals().get("coroutine")?;
co.set("resume", self.resume.clone())?;
co.set("wrap", self.wrap.clone())?;
Ok(())
}
}
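A minimal sketch of wiring this up; the `Functions::new(&lua)` constructor name and signature are assumed from the builder above, and the scheduler must be created first so that its queues are attached to the Lua state:

```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};

fn main() -> LuaResult<()> {
    let lua = Lua::new();

    // The scheduler must exist first so its queues are available in Lua app data
    let sched = Scheduler::new(&lua);

    // NOTE: `Functions::new(&lua)` is assumed here; it builds the scheduler-aware
    // resume / wrap / spawn / defer / cancel / exit functions shown above
    let fns = Functions::new(&lua)?;
    fns.inject_compat(&lua)?;

    // Scripts now observe the scheduler-aware `coroutine.resume` and `coroutine.wrap`
    sched.push_thread_front(lua.load("coroutine.wrap(print)('hello')"), ())?;
    block_on(sched.run());
    Ok(())
}
```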

View file

@ -0,0 +1,18 @@
#![allow(clippy::cargo_common_metadata)]
mod error_callback;
mod exit;
mod functions;
mod queue;
mod result_map;
mod scheduler;
mod status;
mod thread_id;
mod traits;
mod util;
pub use functions::Functions;
pub use scheduler::Scheduler;
pub use status::Status;
pub use thread_id::ThreadId;
pub use traits::{IntoLuaThread, LuaSchedulerExt, LuaSpawnExt};

View file

@ -0,0 +1,139 @@
use std::{pin::Pin, rc::Rc};
use concurrent_queue::ConcurrentQueue;
use derive_more::{Deref, DerefMut};
use event_listener::Event;
use futures_lite::{Future, FutureExt};
use mlua::prelude::*;
use crate::{traits::IntoLuaThread, util::ThreadWithArgs, ThreadId};
/**
Queue for storing [`LuaThread`]s with associated arguments.
Provides methods for pushing and draining the queue, as
well as listening for new items being pushed to the queue.
*/
#[derive(Debug, Clone)]
pub(crate) struct ThreadQueue {
queue: Rc<ConcurrentQueue<ThreadWithArgs>>,
event: Rc<Event>,
}
impl ThreadQueue {
pub fn new() -> Self {
let queue = Rc::new(ConcurrentQueue::unbounded());
let event = Rc::new(Event::new());
Self { queue, event }
}
pub fn push_item<'lua>(
&self,
lua: &'lua Lua,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId> {
let thread = thread.into_lua_thread(lua)?;
let args = args.into_lua_multi(lua)?;
tracing::trace!("pushing item to queue with {} args", args.len());
let id = ThreadId::from(&thread);
let stored = ThreadWithArgs::new(lua, thread, args)?;
self.queue.push(stored).into_lua_err()?;
self.event.notify(usize::MAX);
Ok(id)
}
#[inline]
pub fn drain_items<'outer, 'lua>(
&'outer self,
lua: &'lua Lua,
) -> impl Iterator<Item = (LuaThread<'lua>, LuaMultiValue<'lua>)> + 'outer
where
'lua: 'outer,
{
self.queue.try_iter().map(|stored| stored.into_inner(lua))
}
#[inline]
pub async fn wait_for_item(&self) {
if self.queue.is_empty() {
let listener = self.event.listen();
// NOTE: Need to check again, we could have gotten
// new queued items while creating our listener
if self.queue.is_empty() {
listener.await;
}
}
}
#[inline]
pub fn is_empty(&self) -> bool {
self.queue.is_empty()
}
}
/**
Alias for [`ThreadQueue`], providing a newtype to store in Lua app data.
*/
#[derive(Debug, Clone, Deref, DerefMut)]
pub(crate) struct SpawnedThreadQueue(ThreadQueue);
impl SpawnedThreadQueue {
pub fn new() -> Self {
Self(ThreadQueue::new())
}
}
/**
Alias for [`ThreadQueue`], providing a newtype to store in Lua app data.
*/
#[derive(Debug, Clone, Deref, DerefMut)]
pub(crate) struct DeferredThreadQueue(ThreadQueue);
impl DeferredThreadQueue {
pub fn new() -> Self {
Self(ThreadQueue::new())
}
}
pub type LocalBoxFuture<'fut> = Pin<Box<dyn Future<Output = ()> + 'fut>>;
/**
Queue for storing local futures.
Provides methods for pushing and draining the queue, as
well as listening for new items being pushed to the queue.
*/
#[derive(Debug, Clone)]
pub(crate) struct FuturesQueue<'fut> {
queue: Rc<ConcurrentQueue<LocalBoxFuture<'fut>>>,
event: Rc<Event>,
}
impl<'fut> FuturesQueue<'fut> {
pub fn new() -> Self {
let queue = Rc::new(ConcurrentQueue::unbounded());
let event = Rc::new(Event::new());
Self { queue, event }
}
pub fn push_item(&self, fut: impl Future<Output = ()> + 'fut) {
let _ = self.queue.push(fut.boxed_local());
self.event.notify(usize::MAX);
}
pub fn drain_items<'outer>(
&'outer self,
) -> impl Iterator<Item = LocalBoxFuture<'fut>> + 'outer {
self.queue.try_iter()
}
pub async fn wait_for_item(&self) {
if self.queue.is_empty() {
self.event.listen().await;
}
}
}

View file

@ -0,0 +1,64 @@
#![allow(clippy::inline_always)]
use std::{cell::RefCell, rc::Rc};
use event_listener::Event;
// NOTE: This is the hash algorithm that mlua also uses, so we
// are not adding any additional dependencies / bloat by using it.
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{thread_id::ThreadId, util::ThreadResult};
#[derive(Clone)]
pub(crate) struct ThreadResultMap {
tracked: Rc<RefCell<FxHashSet<ThreadId>>>,
results: Rc<RefCell<FxHashMap<ThreadId, ThreadResult>>>,
events: Rc<RefCell<FxHashMap<ThreadId, Rc<Event>>>>,
}
impl ThreadResultMap {
pub fn new() -> Self {
Self {
tracked: Rc::new(RefCell::new(FxHashSet::default())),
results: Rc::new(RefCell::new(FxHashMap::default())),
events: Rc::new(RefCell::new(FxHashMap::default())),
}
}
#[inline(always)]
pub fn track(&self, id: ThreadId) {
self.tracked.borrow_mut().insert(id);
}
#[inline(always)]
pub fn is_tracked(&self, id: ThreadId) -> bool {
self.tracked.borrow().contains(&id)
}
pub fn insert(&self, id: ThreadId, result: ThreadResult) {
debug_assert!(self.is_tracked(id), "Thread must be tracked");
self.results.borrow_mut().insert(id, result);
if let Some(event) = self.events.borrow_mut().remove(&id) {
event.notify(usize::MAX);
}
}
pub async fn listen(&self, id: ThreadId) {
debug_assert!(self.is_tracked(id), "Thread must be tracked");
if !self.results.borrow().contains_key(&id) {
let listener = {
let mut events = self.events.borrow_mut();
let event = events.entry(id).or_insert_with(|| Rc::new(Event::new()));
event.listen()
};
listener.await;
}
}
pub fn remove(&self, id: ThreadId) -> Option<ThreadResult> {
let res = self.results.borrow_mut().remove(&id)?;
self.tracked.borrow_mut().remove(&id);
self.events.borrow_mut().remove(&id);
Some(res)
}
}

View file

@ -0,0 +1,484 @@
#![allow(clippy::module_name_repetitions)]
use std::{
cell::Cell,
process::ExitCode,
rc::{Rc, Weak as WeakRc},
sync::{Arc, Weak as WeakArc},
thread::panicking,
};
use futures_lite::prelude::*;
use mlua::prelude::*;
use async_executor::{Executor, LocalExecutor};
use tracing::{debug, instrument, trace, trace_span, Instrument};
use crate::{
error_callback::ThreadErrorCallback,
exit::Exit,
queue::{DeferredThreadQueue, FuturesQueue, SpawnedThreadQueue},
result_map::ThreadResultMap,
status::Status,
thread_id::ThreadId,
traits::IntoLuaThread,
util::{run_until_yield, ThreadResult},
};
const ERR_METADATA_ALREADY_ATTACHED: &str = "\
Lua state already has scheduler metadata attached!\
\nThis may be caused by running multiple schedulers on the same Lua state, or a call to Scheduler::run being cancelled.\
\nOnly one scheduler can be used per Lua state at once, and schedulers must always run until completion.\
";
const ERR_METADATA_REMOVED: &str = "\
Lua state scheduler metadata was unexpectedly removed!\
\nThis should never happen, and is likely a bug in the scheduler.\
";
const ERR_SET_CALLBACK_WHEN_RUNNING: &str = "\
Cannot set error callback when scheduler is running!\
";
/**
A scheduler for running Lua threads and async tasks.
*/
#[derive(Clone)]
pub struct Scheduler<'lua> {
lua: &'lua Lua,
queue_spawn: SpawnedThreadQueue,
queue_defer: DeferredThreadQueue,
error_callback: ThreadErrorCallback,
result_map: ThreadResultMap,
status: Rc<Cell<Status>>,
exit: Exit,
}
impl<'lua> Scheduler<'lua> {
/**
Creates a new scheduler for the given Lua state.
This scheduler will have a default error callback that prints errors to stderr.
# Panics
Panics if the given Lua state already has a scheduler attached to it.
*/
#[must_use]
pub fn new(lua: &'lua Lua) -> Scheduler<'lua> {
let queue_spawn = SpawnedThreadQueue::new();
let queue_defer = DeferredThreadQueue::new();
let error_callback = ThreadErrorCallback::default();
let result_map = ThreadResultMap::new();
let exit = Exit::new();
assert!(
lua.app_data_ref::<SpawnedThreadQueue>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
assert!(
lua.app_data_ref::<DeferredThreadQueue>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
assert!(
lua.app_data_ref::<ThreadErrorCallback>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
assert!(
lua.app_data_ref::<ThreadResultMap>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
assert!(
lua.app_data_ref::<Exit>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
lua.set_app_data(queue_spawn.clone());
lua.set_app_data(queue_defer.clone());
lua.set_app_data(error_callback.clone());
lua.set_app_data(result_map.clone());
lua.set_app_data(exit.clone());
let status = Rc::new(Cell::new(Status::NotStarted));
Scheduler {
lua,
queue_spawn,
queue_defer,
error_callback,
result_map,
status,
exit,
}
}
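A minimal sketch of constructing and driving a scheduler, following the same pattern as the trait examples further down; the error callback line simply overrides the default stderr printer:

```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::Scheduler;

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    let sched = Scheduler::new(&lua);

    // Optional: replace the default error callback that prints to stderr
    sched.set_error_callback(|e| eprintln!("lua error: {e}"));

    // Queue some work and run the scheduler until all threads have completed
    sched.push_thread_front(lua.load("print('Hello from the scheduler!')"), ())?;
    block_on(sched.run());
    Ok(())
}
```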
/**
Sets the current status of this scheduler and emits relevant tracing events.
*/
fn set_status(&self, status: Status) {
debug!(status = ?status, "status");
self.status.set(status);
}
/**
Returns the current status of this scheduler.
*/
#[must_use]
pub fn status(&self) -> Status {
self.status.get()
}
/**
Sets the error callback for this scheduler.
This callback will be called whenever a Lua thread errors.
Overwrites any previous error callback.
# Panics
Panics if the scheduler is currently running.
*/
pub fn set_error_callback(&self, callback: impl Fn(LuaError) + Send + 'static) {
assert!(
!self.status().is_running(),
"{ERR_SET_CALLBACK_WHEN_RUNNING}"
);
self.error_callback.replace(callback);
}
/**
Clears the error callback for this scheduler.
This will remove any current error callback, including default(s).
# Panics
Panics if the scheduler is currently running.
*/
pub fn remove_error_callback(&self) {
assert!(
!self.status().is_running(),
"{ERR_SET_CALLBACK_WHEN_RUNNING}"
);
self.error_callback.clear();
}
/**
Gets the exit code for this scheduler, if one has been set.
*/
#[must_use]
pub fn get_exit_code(&self) -> Option<u8> {
self.exit.get()
}
/**
Sets the exit code for this scheduler.
This will cause [`Scheduler::run`] to exit immediately.
*/
pub fn set_exit_code(&self, code: u8) {
self.exit.set(code);
}
/**
Spawns a chunk / function / thread onto the scheduler queue.
Threads are guaranteed to be resumed in the order that they were pushed to the queue.
# Returns
Returns a [`ThreadId`] that can be used to retrieve the result of the thread.
Note that the result may not be available until [`Scheduler::run`] completes.
# Errors
Errors when out of memory.
*/
pub fn push_thread_front(
&self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId> {
let id = self.queue_spawn.push_item(self.lua, thread, args)?;
self.result_map.track(id);
Ok(id)
}
/**
Defers a chunk / function / thread onto the scheduler queue.
Deferred threads are guaranteed to run after all spawned threads either yield or complete.
Threads are guaranteed to be resumed in the order that they were pushed to the queue.
# Returns
Returns a [`ThreadId`] that can be used to retrieve the result of the thread.
Note that the result may not be available until [`Scheduler::run`] completes.
# Errors
Errors when out of memory.
*/
pub fn push_thread_back(
&self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId> {
let id = self.queue_defer.push_item(self.lua, thread, args)?;
self.result_map.track(id);
Ok(id)
}
/**
Gets the tracked result for the [`LuaThread`] with the given [`ThreadId`].
Depending on the current [`Scheduler::status`], this method will return:
- [`Status::NotStarted`]: returns `None`.
- [`Status::Running`]: may return `Some(Ok(v))` or `Some(Err(e))`, but it is not guaranteed.
- [`Status::Completed`]: returns `Some(Ok(v))` or `Some(Err(e))`.
Note that this method also takes the value out of the scheduler and
stops tracking the given thread, so it may only be called once.
Any subsequent calls after this method returns `Some` will return `None`.
*/
#[must_use]
pub fn get_thread_result(&self, id: ThreadId) -> Option<LuaResult<LuaMultiValue<'lua>>> {
self.result_map.remove(id).map(|r| r.value(self.lua))
}
/**
Waits for the [`LuaThread`] with the given [`ThreadId`] to complete.
This will return instantly if the thread has already completed.
*/
pub async fn wait_for_thread(&self, id: ThreadId) {
self.result_map.listen(id).await;
}
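A small sketch of the result-tracking flow described above; threads pushed through the scheduler are tracked automatically, and the result can be taken out once after `run` completes:

```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::Scheduler;

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    let sched = Scheduler::new(&lua);

    // `push_thread_front` tracks the thread, so its final result is stored
    let id = sched.push_thread_front(lua.load("return 1 + 2"), ())?;
    block_on(sched.run());

    // Taking the result also stops tracking, so a second call would return `None`
    let values = sched.get_thread_result(id).expect("thread was tracked")?;
    assert_eq!(i64::from_lua_multi(values, &lua)?, 3);
    Ok(())
}
```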
/**
Runs the scheduler until all Lua threads have completed.
Note that the given Lua state must be the same one that was
used to create this scheduler, otherwise this method will panic.
# Panics
Panics if the given Lua state already has a scheduler attached to it.
*/
#[allow(clippy::too_many_lines)]
#[instrument(level = "debug", name = "Scheduler::run", skip(self))]
pub async fn run(&self) {
/*
Create new executors to use - note that we do not need to create multiple executors
for work stealing; the user may do that themselves if they want to, and it will work
just fine as long as anything async is .await-ed from within a Lua async function.
The main purpose of the two executors here is just to have one with
the Send bound, and another (local) one without it, for Lua scheduling.
We also use the main executor to drive the main loop below forward,
saving a tiny bit of processing from going on the Lua executor itself.
*/
let local_exec = LocalExecutor::new();
let main_exec = Arc::new(Executor::new());
let fut_queue = Rc::new(FuturesQueue::new());
/*
Store the main executor and queue in Lua, so that they may be used with LuaSchedulerExt.
Also ensure we do not already have an executor or queues - these are definite user errors
and may happen if the user tries to run multiple schedulers on the same Lua state at once.
*/
assert!(
self.lua.app_data_ref::<WeakArc<Executor>>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
assert!(
self.lua.app_data_ref::<WeakRc<FuturesQueue>>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
self.lua.set_app_data(Arc::downgrade(&main_exec));
self.lua.set_app_data(Rc::downgrade(&fut_queue.clone()));
/*
Manually tick the Lua executor, while running under the main executor.
Each tick we wait for the next action to perform, in prioritized order:
1. The exit event is triggered by setting an exit code
2. A Lua thread is available to run on the spawned queue
3. A Lua thread is available to run on the deferred queue
4. A new thread-local future is available to run on the local executor
5. Task(s) scheduled on the Lua executor have made progress and should be polled again
This ordering is vital to ensure that we don't accidentally exit the main loop
when there are new Lua threads to enqueue and potentially more work to be done.
*/
let fut = async {
let result_map = self.result_map.clone();
let process_thread = |thread: LuaThread<'lua>, args| {
// NOTE: Thread may have been cancelled from Lua
// before we got here, so we need to check it again
if thread.status() == LuaThreadStatus::Resumable {
// Check if we should be tracking this thread
let id = ThreadId::from(&thread);
let id_tracked = result_map.is_tracked(id);
let result_map_inner = if id_tracked {
Some(result_map.clone())
} else {
None
};
// Create our future which will run the thread and store its final result
let fut = async move {
if id_tracked {
// Run until yield and check if we got a final result
if let Some(res) = run_until_yield(thread.clone(), args).await {
if let Err(e) = res.as_ref() {
self.error_callback.call(e);
}
if thread.status() != LuaThreadStatus::Resumable {
let thread_res = ThreadResult::new(res, self.lua);
result_map_inner.unwrap().insert(id, thread_res);
}
}
} else {
// Just run until yield
if let Some(res) = run_until_yield(thread, args).await {
if let Err(e) = res.as_ref() {
self.error_callback.call(e);
}
}
}
};
// Spawn it on the executor
local_exec.spawn(fut).detach();
}
};
loop {
let fut_exit = self.exit.listen(); // 1
let fut_spawn = self.queue_spawn.wait_for_item(); // 2
let fut_defer = self.queue_defer.wait_for_item(); // 3
let fut_futs = fut_queue.wait_for_item(); // 4
// 5
let mut num_processed = 0;
let span_tick = trace_span!("Scheduler::tick");
let fut_tick = async {
local_exec.tick().await;
// NOTE: Try to do as much work as possible instead of just a single tick()
num_processed += 1;
while local_exec.try_tick() {
num_processed += 1;
}
};
// 1 + 2 + 3 + 4 + 5
fut_exit
.or(fut_spawn)
.or(fut_defer)
.or(fut_futs)
.or(fut_tick.instrument(span_tick.or_current()))
.await;
// Check if we should exit
if self.exit.get().is_some() {
debug!("exit signal received");
break;
}
// Process spawned threads first, then deferred threads, then futures
let mut num_spawned = 0;
let mut num_deferred = 0;
let mut num_futures = 0;
{
let _span = trace_span!("Scheduler::drain_spawned").entered();
for (thread, args) in self.queue_spawn.drain_items(self.lua) {
process_thread(thread, args);
num_spawned += 1;
}
}
{
let _span = trace_span!("Scheduler::drain_deferred").entered();
for (thread, args) in self.queue_defer.drain_items(self.lua) {
process_thread(thread, args);
num_deferred += 1;
}
}
{
let _span = trace_span!("Scheduler::drain_futures").entered();
for fut in fut_queue.drain_items() {
local_exec.spawn(fut).detach();
num_futures += 1;
}
}
// Empty executor = we didn't spawn any new Lua tasks
// above, and there are no remaining tasks to run later
let completed = local_exec.is_empty()
&& self.queue_spawn.is_empty()
&& self.queue_defer.is_empty();
trace!(
futures_spawned = num_futures,
futures_processed = num_processed,
lua_threads_spawned = num_spawned,
lua_threads_deferred = num_deferred,
"loop"
);
if completed {
break;
}
}
};
// Run the executor inside a span until all lua threads complete
self.set_status(Status::Running);
main_exec.run(fut).await;
self.set_status(Status::Completed);
// Clean up
self.lua
.remove_app_data::<WeakArc<Executor>>()
.expect(ERR_METADATA_REMOVED);
self.lua
.remove_app_data::<WeakRc<FuturesQueue>>()
.expect(ERR_METADATA_REMOVED);
}
}
impl Drop for Scheduler<'_> {
fn drop(&mut self) {
if panicking() {
// Do not cause further panics if already panicking, as
// this may abort the program instead of safely unwinding
self.lua.remove_app_data::<SpawnedThreadQueue>();
self.lua.remove_app_data::<DeferredThreadQueue>();
self.lua.remove_app_data::<ThreadErrorCallback>();
self.lua.remove_app_data::<ThreadResultMap>();
self.lua.remove_app_data::<Exit>();
} else {
// In any other case we panic if metadata was removed incorrectly
self.lua
.remove_app_data::<SpawnedThreadQueue>()
.expect(ERR_METADATA_REMOVED);
self.lua
.remove_app_data::<DeferredThreadQueue>()
.expect(ERR_METADATA_REMOVED);
self.lua
.remove_app_data::<ThreadErrorCallback>()
.expect(ERR_METADATA_REMOVED);
self.lua
.remove_app_data::<ThreadResultMap>()
.expect(ERR_METADATA_REMOVED);
self.lua
.remove_app_data::<Exit>()
.expect(ERR_METADATA_REMOVED);
}
}
}

View file

@ -0,0 +1,31 @@
#![allow(clippy::module_name_repetitions)]
/**
The current status of a scheduler.
*/
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Status {
/// The scheduler has not yet started running.
NotStarted,
/// The scheduler is currently running.
Running,
/// The scheduler has completed.
Completed,
}
impl Status {
#[must_use]
pub const fn is_not_started(self) -> bool {
matches!(self, Self::NotStarted)
}
#[must_use]
pub const fn is_running(self) -> bool {
matches!(self, Self::Running)
}
#[must_use]
pub const fn is_completed(self) -> bool {
matches!(self, Self::Completed)
}
}

View file

@ -0,0 +1,30 @@
use std::hash::{Hash, Hasher};
use mlua::prelude::*;
/**
Opaque and unique ID representing a [`LuaThread`].
Typically used for associating metadata with a thread in a structure such as a `HashMap<ThreadId, ...>`.
Note that holding a `ThreadId` does not prevent the thread from being garbage collected.
The actual thread may or may not still exist and be active at any given point in time.
*/
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ThreadId {
inner: usize,
}
impl From<&LuaThread<'_>> for ThreadId {
fn from(thread: &LuaThread) -> Self {
Self {
inner: thread.to_pointer() as usize,
}
}
}
impl Hash for ThreadId {
fn hash<H: Hasher>(&self, state: &mut H) {
self.inner.hash(state);
}
}
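A small sketch of the map-key use case mentioned above:

```rust
use std::collections::HashMap;

use mlua::prelude::*;
use mlua_luau_scheduler::ThreadId;

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    let thread = lua.create_thread(lua.create_function(|_, ()| Ok(()))?)?;

    // Associate metadata with the thread without keeping the thread itself alive
    let mut names: HashMap<ThreadId, &str> = HashMap::new();
    names.insert(ThreadId::from(&thread), "worker");
    assert_eq!(names.get(&ThreadId::from(&thread)), Some(&"worker"));
    Ok(())
}
```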

View file

@ -0,0 +1,378 @@
#![allow(unused_imports)]
#![allow(clippy::missing_errors_doc)]
use std::{
cell::Cell, future::Future, process::ExitCode, rc::Weak as WeakRc, sync::Weak as WeakArc,
};
use async_executor::{Executor, Task};
use mlua::prelude::*;
use tracing::trace;
use crate::{
exit::Exit,
queue::{DeferredThreadQueue, FuturesQueue, SpawnedThreadQueue},
result_map::ThreadResultMap,
scheduler::Scheduler,
thread_id::ThreadId,
};
/**
Trait for any struct that can be turned into a [`LuaThread`]
and passed to the scheduler, implemented for the following types:
- Lua threads ([`LuaThread`])
- Lua functions ([`LuaFunction`])
- Lua chunks ([`LuaChunk`])
*/
pub trait IntoLuaThread<'lua> {
/**
Converts the value into a Lua thread.
# Errors
Errors when out of memory.
*/
fn into_lua_thread(self, lua: &'lua Lua) -> LuaResult<LuaThread<'lua>>;
}
impl<'lua> IntoLuaThread<'lua> for LuaThread<'lua> {
fn into_lua_thread(self, _: &'lua Lua) -> LuaResult<LuaThread<'lua>> {
Ok(self)
}
}
impl<'lua> IntoLuaThread<'lua> for LuaFunction<'lua> {
fn into_lua_thread(self, lua: &'lua Lua) -> LuaResult<LuaThread<'lua>> {
lua.create_thread(self)
}
}
impl<'lua> IntoLuaThread<'lua> for LuaChunk<'lua, '_> {
fn into_lua_thread(self, lua: &'lua Lua) -> LuaResult<LuaThread<'lua>> {
lua.create_thread(self.into_function()?)
}
}
impl<'lua, T> IntoLuaThread<'lua> for &T
where
T: IntoLuaThread<'lua> + Clone,
{
fn into_lua_thread(self, lua: &'lua Lua) -> LuaResult<LuaThread<'lua>> {
self.clone().into_lua_thread(lua)
}
}
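A short sketch showing the three accepted forms being pushed onto a scheduler; all of them end up as a `LuaThread`:

```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::Scheduler;

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    let sched = Scheduler::new(&lua);

    // A chunk: compiled into a function, then wrapped in a new thread
    sched.push_thread_front(lua.load("print('from a chunk')"), ())?;

    // A function: wrapped in a new thread
    sched.push_thread_front(lua.create_function(|_, ()| Ok(()))?, ())?;

    // An existing thread: used as-is
    let thread = lua.create_thread(lua.create_function(|_, ()| Ok(()))?)?;
    sched.push_thread_front(thread, ())?;

    block_on(sched.run());
    Ok(())
}
```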
/**
Trait for interacting with the current [`Scheduler`].
Provides extra methods on the [`Lua`] struct for:
- Setting the exit code and forcibly stopping the scheduler
- Pushing (spawning) and deferring (pushing to the back) lua threads
- Tracking and getting the result of lua threads
*/
pub trait LuaSchedulerExt<'lua> {
/**
Sets the exit code of the current scheduler.
See [`Scheduler::set_exit_code`] for more information.
# Panics
Panics if called outside of a running [`Scheduler`].
*/
fn set_exit_code(&self, code: u8);
/**
Pushes (spawns) a lua thread to the **front** of the current scheduler.
See [`Scheduler::push_thread_front`] for more information.
# Panics
Panics if called outside of a running [`Scheduler`].
*/
fn push_thread_front(
&'lua self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId>;
/**
Pushes (defers) a lua thread to the **back** of the current scheduler.
See [`Scheduler::push_thread_back`] for more information.
# Panics
Panics if called outside of a running [`Scheduler`].
*/
fn push_thread_back(
&'lua self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId>;
/**
Registers the given thread to be tracked within the current scheduler.
Must be called before waiting for a thread to complete or getting its result.
*/
fn track_thread(&'lua self, id: ThreadId);
/**
Gets the result of the given thread.
See [`Scheduler::get_thread_result`] for more information.
# Panics
Panics if called outside of a running [`Scheduler`].
*/
fn get_thread_result(&'lua self, id: ThreadId) -> Option<LuaResult<LuaMultiValue<'lua>>>;
/**
Waits for the given thread to complete.
See [`Scheduler::wait_for_thread`] for more information.
# Panics
Panics if called outside of a running [`Scheduler`].
*/
fn wait_for_thread(&'lua self, id: ThreadId) -> impl Future<Output = ()>;
}
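A sketch of using these methods from within an async function while a scheduler is running; note that `track_thread` must be called before waiting for the thread or fetching its result:

```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::{LuaSchedulerExt, Scheduler};

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    lua.globals().set(
        "runDeferred",
        lua.create_async_function(|lua, ()| async move {
            // Queue a thread at the back of the running scheduler and track it
            let id = lua.push_thread_back(lua.load("return 'done'"), ())?;
            lua.track_thread(id);

            // Wait for it to finish, then take its result out
            lua.wait_for_thread(id).await;
            let values = lua.get_thread_result(id).expect("thread was tracked")?;
            let s = String::from_lua_multi(values, lua)?;
            println!("deferred thread returned: {s}");
            Ok(())
        })?,
    )?;

    let sched = Scheduler::new(&lua);
    sched.push_thread_front(lua.load("runDeferred()"), ())?;
    block_on(sched.run());
    Ok(())
}
```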
/**
Trait for interacting with the [`Executor`] for the current [`Scheduler`].
Provides extra methods on the [`Lua`] struct for:
- Spawning thread-local (`!Send`) futures on the current executor
- Spawning background (`Send`) futures on the current executor
- Spawning blocking tasks on a separate thread pool
*/
pub trait LuaSpawnExt<'lua> {
/**
Spawns the given future on the current executor and returns its [`Task`].
# Panics
Panics if called outside of a running [`Scheduler`].
# Example usage
```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::*;
fn main() -> LuaResult<()> {
let lua = Lua::new();
lua.globals().set(
"spawnBackgroundTask",
lua.create_async_function(|lua, ()| async move {
lua.spawn(async move {
println!("Hello from background task!");
}).await;
Ok(())
})?
)?;
let sched = Scheduler::new(&lua);
sched.push_thread_front(lua.load("spawnBackgroundTask()"), ());
block_on(sched.run());
Ok(())
}
```
*/
fn spawn<F, T>(&self, fut: F) -> Task<T>
where
F: Future<Output = T> + Send + 'static,
T: Send + 'static;
/**
Spawns the given thread-local future on the current executor.
Note that this future will run detached and always to completion,
preventing the [`Scheduler`] it was spawned on from completing until it is done.
# Panics
Panics if called outside of a running [`Scheduler`].
# Example usage
```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::*;
fn main() -> LuaResult<()> {
let lua = Lua::new();
lua.globals().set(
"spawnLocalTask",
lua.create_async_function(|lua, ()| async move {
lua.spawn_local(async move {
println!("Hello from local task!");
});
Ok(())
})?
)?;
let sched = Scheduler::new(&lua);
sched.push_thread_front(lua.load("spawnLocalTask()"), ());
block_on(sched.run());
Ok(())
}
```
*/
fn spawn_local<F>(&self, fut: F)
where
F: Future<Output = ()> + 'static;
/**
Spawns the given blocking function and returns its [`Task`].
This function will run on a separate thread pool and not block the current executor.
# Panics
Panics if called outside of a running [`Scheduler`].
# Example usage
```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::*;
fn main() -> LuaResult<()> {
let lua = Lua::new();
lua.globals().set(
"spawnBlockingTask",
lua.create_async_function(|lua, ()| async move {
lua.spawn_blocking(|| {
println!("Hello from blocking task!");
}).await;
Ok(())
})?
)?;
let sched = Scheduler::new(&lua);
sched.push_thread_front(lua.load("spawnBlockingTask()"), ());
block_on(sched.run());
Ok(())
}
```
*/
fn spawn_blocking<F, T>(&self, f: F) -> Task<T>
where
F: FnOnce() -> T + Send + 'static,
T: Send + 'static;
}
impl<'lua> LuaSchedulerExt<'lua> for Lua {
fn set_exit_code(&self, code: u8) {
let exit = self
.app_data_ref::<Exit>()
.expect("exit code can only be set from within an active scheduler");
exit.set(code);
}
fn push_thread_front(
&'lua self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId> {
let queue = self
.app_data_ref::<SpawnedThreadQueue>()
.expect("lua threads can only be pushed from within an active scheduler");
queue.push_item(self, thread, args)
}
fn push_thread_back(
&'lua self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId> {
let queue = self
.app_data_ref::<DeferredThreadQueue>()
.expect("lua threads can only be pushed from within an active scheduler");
queue.push_item(self, thread, args)
}
fn track_thread(&'lua self, id: ThreadId) {
let map = self
.app_data_ref::<ThreadResultMap>()
.expect("lua threads can only be tracked from within an active scheduler");
map.track(id);
}
fn get_thread_result(&'lua self, id: ThreadId) -> Option<LuaResult<LuaMultiValue<'lua>>> {
let map = self
.app_data_ref::<ThreadResultMap>()
.expect("lua threads results can only be retrieved from within an active scheduler");
map.remove(id).map(|r| r.value(self))
}
fn wait_for_thread(&'lua self, id: ThreadId) -> impl Future<Output = ()> {
let map = self
.app_data_ref::<ThreadResultMap>()
.expect("lua threads results can only be retrieved from within an active scheduler");
async move { map.listen(id).await }
}
}
impl<'lua> LuaSpawnExt<'lua> for Lua {
fn spawn<F, T>(&self, fut: F) -> Task<T>
where
F: Future<Output = T> + Send + 'static,
T: Send + 'static,
{
let exec = self
.app_data_ref::<WeakArc<Executor>>()
.expect("tasks can only be spawned within an active scheduler")
.upgrade()
.expect("executor was dropped");
trace!("spawning future on executor");
exec.spawn(fut)
}
fn spawn_local<F>(&self, fut: F)
where
F: Future<Output = ()> + 'static,
{
let queue = self
.app_data_ref::<WeakRc<FuturesQueue>>()
.expect("tasks can only be spawned within an active scheduler")
.upgrade()
.expect("executor was dropped");
trace!("spawning local task on executor");
queue.push_item(fut);
}
fn spawn_blocking<F, T>(&self, f: F) -> Task<T>
where
F: FnOnce() -> T + Send + 'static,
T: Send + 'static,
{
let exec = self
.app_data_ref::<WeakArc<Executor>>()
.expect("tasks can only be spawned within an active scheduler")
.upgrade()
.expect("executor was dropped");
trace!("spawning blocking task on executor");
exec.spawn(blocking::unblock(f))
}
}

View file

@ -0,0 +1,147 @@
use futures_lite::StreamExt;
use mlua::prelude::*;
use tracing::instrument;
/**
Runs a Lua thread until it manually yields (using coroutine.yield), errors, or completes.
May return `None` if the thread was cancelled.
Otherwise returns the values yielded by the thread, or the error that caused it to stop.
*/
#[instrument(level = "trace", name = "Scheduler::run_until_yield", skip_all)]
pub(crate) async fn run_until_yield<'lua>(
thread: LuaThread<'lua>,
args: LuaMultiValue<'lua>,
) -> Option<LuaResult<LuaMultiValue<'lua>>> {
let mut stream = thread.into_async(args);
/*
NOTE: It is very important that we drop the thread/stream as
soon as we are done, it takes up valuable Lua registry space
and detached tasks will not drop until the executor does
https://github.com/smol-rs/smol/issues/294
We also do not unwrap here since returning `None` is expected behavior for cancellation.
Even though we are converting into a stream, and then immediately running it,
the future may still be cancelled before it is polled, which gives us None.
*/
stream.next().await
}
/**
Checks if the given [`LuaValue`] is the async `POLL_PENDING` constant.
*/
#[inline]
pub(crate) fn is_poll_pending(value: &LuaValue) -> bool {
value
.as_light_userdata()
.is_some_and(|l| l == Lua::poll_pending())
}
/**
Representation of a [`LuaResult`] with an associated [`LuaMultiValue`] currently stored in the Lua registry.
*/
#[derive(Debug)]
pub(crate) struct ThreadResult {
inner: LuaResult<LuaRegistryKey>,
}
impl ThreadResult {
pub fn new(result: LuaResult<LuaMultiValue>, lua: &Lua) -> Self {
Self {
inner: match result {
Ok(v) => Ok({
let vec = v.into_vec();
lua.create_registry_value(vec).expect("out of memory")
}),
Err(e) => Err(e),
},
}
}
pub fn value(self, lua: &Lua) -> LuaResult<LuaMultiValue> {
match self.inner {
Ok(key) => {
let vec = lua.registry_value(&key).unwrap();
lua.remove_registry_value(key).unwrap();
Ok(LuaMultiValue::from_vec(vec))
}
Err(e) => Err(e.clone()),
}
}
}
/**
Representation of a [`LuaThread`] with its associated arguments currently stored in the Lua registry.
*/
#[derive(Debug)]
pub(crate) struct ThreadWithArgs {
key_thread: LuaRegistryKey,
key_args: LuaRegistryKey,
}
impl ThreadWithArgs {
pub fn new<'lua>(
lua: &'lua Lua,
thread: LuaThread<'lua>,
args: LuaMultiValue<'lua>,
) -> LuaResult<Self> {
let argsv = args.into_vec();
let key_thread = lua.create_registry_value(thread)?;
let key_args = lua.create_registry_value(argsv)?;
Ok(Self {
key_thread,
key_args,
})
}
pub fn into_inner(self, lua: &Lua) -> (LuaThread<'_>, LuaMultiValue<'_>) {
let thread = lua.registry_value(&self.key_thread).unwrap();
let argsv = lua.registry_value(&self.key_args).unwrap();
let args = LuaMultiValue::from_vec(argsv);
lua.remove_registry_value(self.key_thread).unwrap();
lua.remove_registry_value(self.key_args).unwrap();
(thread, args)
}
}
/**
Wrapper struct to accept either a Lua thread or a Lua function as a function argument.
[`LuaThreadOrFunction::into_thread`] may be used to convert the value into a Lua thread.
*/
#[derive(Clone)]
pub(crate) enum LuaThreadOrFunction<'lua> {
Thread(LuaThread<'lua>),
Function(LuaFunction<'lua>),
}
impl<'lua> LuaThreadOrFunction<'lua> {
pub(super) fn into_thread(self, lua: &'lua Lua) -> LuaResult<LuaThread<'lua>> {
match self {
Self::Thread(t) => Ok(t),
Self::Function(f) => lua.create_thread(f),
}
}
}
impl<'lua> FromLua<'lua> for LuaThreadOrFunction<'lua> {
fn from_lua(value: LuaValue<'lua>, _: &'lua Lua) -> LuaResult<Self> {
match value {
LuaValue::Thread(t) => Ok(Self::Thread(t)),
LuaValue::Function(f) => Ok(Self::Function(f)),
value => Err(LuaError::FromLuaConversionError {
from: value.type_name(),
to: "LuaThreadOrFunction",
message: Some("Expected thread or function".to_string()),
}),
}
}
}

View file

@ -26,11 +26,11 @@ assert(
"expected source block name for 'luau.load' to return a custom debug name"
)
local success = pcall(function()
local loadSuccess = pcall(function()
luau.load(luau.compile(RETURN_LUAU_CODE_BLOCK))
end)
assert(success, "expected `luau.load` to be able to process the result of `luau.compile`")
assert(loadSuccess, "expected `luau.load` to be able to process the result of `luau.compile`")
local CUSTOM_SOURCE_WITH_FOO_FN = "return foo()"
@ -48,34 +48,92 @@ local fooFn = luau.load(CUSTOM_SOURCE_WITH_FOO_FN, {
local fooFnRet = fooFn()
assert(fooFnRet == fooValue, "expected `luau.load` with custom environment to return proper values")
local CUSTOM_SOURCE_WITH_PRINT_FN = "return print()"
-- NOTE: Same as what we did above, new userdata to guarantee unique-ness
local overriddenValue = newproxy(false)
local overriddenFn = luau.load(CUSTOM_SOURCE_WITH_PRINT_FN, {
local fooValue2 = newproxy(false)
local fooFn2 = luau.load(CUSTOM_SOURCE_WITH_FOO_FN, {
environment = {
print = function()
return overriddenValue
foo = function()
return fooValue2
end,
},
enableGlobals = false,
})
local overriddenFnRet = overriddenFn()
local fooFn2Ret = fooFn2()
assert(
overriddenFnRet == overriddenValue,
fooFn2Ret == fooValue2,
"expected `luau.load` with custom environment and no default globals to still return proper values"
)
local CUSTOM_SOURCE_WITH_PRINT_FN = "return print()"
-- NOTE: Testing overriding the print function
local overriddenPrintValue1 = newproxy(false)
local overriddenPrintFn1 = luau.load(CUSTOM_SOURCE_WITH_PRINT_FN, {
environment = {
print = function()
return overriddenPrintValue1
end,
},
enableGlobals = true,
})
local overriddenPrintFnRet1 = overriddenPrintFn1()
assert(
overriddenPrintFnRet1 == overriddenPrintValue1,
"expected `luau.load` with overridden environment to return proper values"
)
local CUSTOM_SOURCE_WITH_DEFAULT_FN = "return string.lower(...)"
local overriddenFn2 = luau.load(CUSTOM_SOURCE_WITH_DEFAULT_FN, {
local overriddenPrintValue2 = newproxy(false)
local overriddenPrintFn2 = luau.load(CUSTOM_SOURCE_WITH_PRINT_FN, {
environment = {
hello = "world",
print = function()
return overriddenPrintValue2
end,
},
enableGlobals = false,
})
local overriddenFn2Ret = overriddenFn2("LOWERCASE")
local overriddenPrintFnRet2 = overriddenPrintFn2()
assert(
overriddenFn2Ret == "lowercase",
"expected `luau.load` with overridden environment to contain default globals"
overriddenPrintFnRet2 == overriddenPrintValue2,
"expected `luau.load` with overridden environment and disabled default globals to return proper values"
)
-- NOTE: Testing whether injectGlobals works
local CUSTOM_SOURCE_WITH_DEFAULT_FN = "return string.lower(...)"
local lowerFn1 = luau.load(CUSTOM_SOURCE_WITH_DEFAULT_FN, {
environment = {},
injectGlobals = false,
})
local lowerFn1Success = pcall(lowerFn1, "LOWERCASE")
assert(
not lowerFn1Success,
"expected `luau.load` with injectGlobals = false and empty custom environment to not contain default globals"
)
local lowerFn2 = luau.load(CUSTOM_SOURCE_WITH_DEFAULT_FN, {
environment = { string = string },
injectGlobals = false,
})
local lowerFn2Success, lowerFn2Result = pcall(lowerFn2, "LOWERCASE")
assert(
lowerFn2Success and lowerFn2Result == "lowercase",
"expected `luau.load` with injectGlobals = false and valid custom environment to return proper values"
)
local lowerFn3 = luau.load(CUSTOM_SOURCE_WITH_DEFAULT_FN, {
environment = {},
injectGlobals = true,
})
local lowerFn3Success, lowerFn3Result = pcall(lowerFn3, "LOWERCASE")
assert(
lowerFn3Success and lowerFn3Result == "lowercase",
"expected `luau.load` with injectGlobals = true and empty custom environment to return proper values"
)

64
tests/luau/safeenv.luau Normal file
View file

@ -0,0 +1,64 @@
local luau = require("@lune/luau")
local TEST_SCRIPT = [[
local start = os.clock()
local x
for i = 1, 1e6 do
x = math.sqrt(i)
end
local finish = os.clock()
return finish - start
]]
local TEST_BYTECODE = luau.compile(TEST_SCRIPT, {
optimizationLevel = 2,
coverageLevel = 0,
debugLevel = 0,
})
-- Load the bytecode with different configurations
local safeCodegenFunction = luau.load(TEST_BYTECODE, {
debugName = "safeCodegenFunction",
codegenEnabled = true,
})
local unsafeCodegenFunction = luau.load(TEST_BYTECODE, {
debugName = "unsafeCodegenFunction",
environment = {},
injectGlobals = true,
codegenEnabled = true,
})
local safeFunction = luau.load(TEST_BYTECODE, {
debugName = "safeFunction",
codegenEnabled = false,
})
local unsafeFunction = luau.load(TEST_BYTECODE, {
debugName = "unsafeFunction",
environment = {},
injectGlobals = true,
codegenEnabled = false,
})
-- Run the functions to get the timings
local safeCodegenTime = safeCodegenFunction()
local unsafeCodegenTime = unsafeCodegenFunction()
local safeTime = safeFunction()
local unsafeTime = unsafeFunction()
-- Assert that safeCodegenTime is always at least twice as fast as both unsafe functions
local safeCodegenUpperBound = safeCodegenTime * 2
assert(
unsafeCodegenTime > safeCodegenUpperBound and unsafeTime > safeCodegenUpperBound,
"expected luau.load with codegenEnabled = true and no custom environment to use codegen"
)
-- Assert that safeTime is always at least twice as fast as both unsafe functions
local safeUpperBound = safeTime * 2
assert(
unsafeCodegenTime > safeUpperBound and unsafeTime > safeUpperBound,
"expected luau.load with codegenEnabled = false and no custom environment to have safeenv enabled"
)
-- Normally we'd also want to check whether codegen is actually being enabled by
-- comparing timings of safeCodegenFunction and safeFunction, but since we don't have a way of
-- checking whether the current device even supports codegen, we can't safely test this.

View file

@ -0,0 +1,7 @@
local process = require("@lune/process")
-- Spawning a child process for a non-existent
-- program should not panic, but should error
local success = pcall(process.spawn, "someProgramThatDoesNotExist")
assert(not success, "Spawned a non-existent program")

View file

@ -3,14 +3,14 @@
local regex = require("@lune/regex")
local re = regex.new("[0-9]+")
assert(tostring(re) == "Regex([0-9]+)")
assert(tostring(re) == "[0-9]+")
assert(typeof(re) == "Regex")
local mtch = re:find("1337 wow")
assert(tostring(mtch) == "RegexMatch(1337)")
assert(tostring(mtch) == "1337")
assert(typeof(mtch) == "RegexMatch")
local re2 = regex.new("([0-9]+) ([0-9]+) wow! ([0-9]+) ([0-9]+)")
local captures = re2:captures("1337 125600 wow! 1984 0")
assert(tostring(captures) == "RegexCaptures(4)")
assert(tostring(captures) == "4")
assert(typeof(captures) == "RegexCaptures")

View file

@ -101,6 +101,11 @@ local folder = Instance.new("Folder")
folder:SetAttribute("Foo", "Bar")
assert(folder:GetAttribute("Foo") == "Bar")
-- Setting attributes to nil should work
folder:SetAttribute("Foo", nil)
assert(folder:GetAttribute("Foo") == nil)
-- Writing files with modified attributes should work
local game = Instance.new("DataModel")

View file

@ -0,0 +1,48 @@
local serde = require("@lune/serde")
local TEST_INPUT =
"Luau is a fast, small, safe, gradually typed embeddable scripting language derived from Lua."
local function test_case_hash(algorithm: serde.HashAlgorithm, expected: string)
assert(
serde.hash(algorithm, TEST_INPUT) == expected,
`hashing algorithm '{algorithm}' did not hash test string correctly`
)
assert(
serde.hash(algorithm, buffer.fromstring(TEST_INPUT)) == expected,
`hashing algorithm '{algorithm}' did not hash test buffer correctly`
)
end
test_case_hash("blake3", "eccfe3a6696b2a1861c64cc78663cff51301058e5dc22bb6249e7e1e0173d7fe")
test_case_hash("md5", "2aed9e020b49d219dc383884c5bd7acd")
test_case_hash("sha1", "9dce74190857f36e6d3f5e8eb7fe704a74060726")
test_case_hash("sha224", "f7ccd8a5f2697df8470b66f03824e073075292a1fab40d3a2ddc2e83")
test_case_hash("sha256", "f1d149bfd1ea38833ae6abf2a6fece1531532283820d719272e9cf3d9344efea")
test_case_hash(
"sha384",
"f6da4b47846c6016a9b32f01b861e45195cf1fa6fc5c9dd2257f7dc1c14092f11001839ec1223c30ab7adb7370812863"
)
test_case_hash(
"sha512",
"49fd834fdf3d4eaf4d4aff289acfc24d649f81cee7a5a7940e5c86854e04816f0a97c53f2ca4908969a512ec5ad1dc466422e3928f5ce3da9913959315df807c"
)
test_case_hash("sha3-224", "56a4dd1ff1bd9baff7f8bbe380dbf2c75b073161693f94ebf91aeee5")
test_case_hash("sha3-256", "ee01be10e0dc133cd702999e854b396f40b039d5ba6ddec9d04bf8623ba04dd7")
test_case_hash(
"sha3-384",
"e992f31e638b47802f33a4327c0a951823e32491ddcef5af9ce18cff84475c98ced23928d47ef51a8a4299dfe2ece361"
)
test_case_hash(
"sha3-512",
"08bd02aca3052b7740de80b8e8b9969dc9059a4bfae197095430e0aa204fbd3afb11731b127559b90c2f7e295835ea844ddbb29baf2fdb1d823046052c120fc9"
)
local failed = pcall(serde.hash, "a random string" :: any, "input that shouldn't be hashed")
assert(failed == false, "serde.hash shouldn't allow invalid algorithms passed to it!")
assert(
serde.hash("sha256", "\0oh no invalid utf-8\127\0\255")
== "c18ed3188f9e93f9ecd3582d7398c45120b0b30a0e26243809206228ab711b78",
"serde.hash should hash invalid UTF-8 just fine"
)

View file

@ -0,0 +1,60 @@
local serde = require("@lune/serde")
local INPUT_STRING = "important data to verify the integrity of"
-- if you read this string, you're obligated to keep it a secret! :-)
local SECRET_STRING = "don't read this we operate on the honor system"
local function test_case_hmac(algorithm: serde.HashAlgorithm, expected: string)
assert(
serde.hmac(algorithm, INPUT_STRING, SECRET_STRING) == expected,
`HMAC test for algorithm '{algorithm}' was not correct with string input and string secret`
)
assert(
serde.hmac(algorithm, INPUT_STRING, buffer.fromstring(SECRET_STRING)) == expected,
`HMAC test for algorithm '{algorithm}' was not correct with string input and buffer secret`
)
assert(
serde.hmac(algorithm, buffer.fromstring(INPUT_STRING), SECRET_STRING) == expected,
`HMAC test for algorithm '{algorithm}' was not correct with buffer input and string secret`
)
assert(
serde.hmac(algorithm, buffer.fromstring(INPUT_STRING), buffer.fromstring(SECRET_STRING))
== expected,
`HMAC test for algorithm '{algorithm}' was not correct with buffer input and buffer secret`
)
end
test_case_hmac("blake3", "1d9c1b9405567fc565c2c3c6d6c0e170be72a2623d29911f43cb2ce42a373c01")
test_case_hmac("md5", "525379669c93ab5f59d2201024145b79")
test_case_hmac("sha1", "75227c11ed65133788feab0ce7eb8efc8c1f0517")
test_case_hmac("sha224", "47a4857d7d7e1070f47f76558323e03471a918facaf3667037519c29")
test_case_hmac("sha256", "4a4816ab8d4b780a8cf131e34a3df25e4c7bc4eba453cd86e50271aab4e95f45")
test_case_hmac(
"sha384",
"6b24aeae78d0f84ec8a4669b24bda1131205535233c344f4262c1f90f29af04c5537612c269bbab8aaca9d8293f4a280"
)
test_case_hmac(
"sha512",
"9fffa071241e2f361f8a47a97d251c1d4aae37498efbc49745bf9916d8431f1f361080d350067ed65744d3da42956da33ec57b04901a5fd63a891381a1485ef7"
)
test_case_hmac("sha3-224", "ea102dfaa74aa285555bdba29a04429dfd4e997fa40322459094929f")
test_case_hmac("sha3-256", "17bde287e4692e5b7f281e444efefe92e00696a089570bd6814fd0e03d7763d2")
test_case_hmac(
"sha3-384",
"24f68401653d25f36e7ee8635831215f8b46710d4e133c9d1e091e5972c69b0f1d0cb80f5507522fa174d5c4746963c1"
)
test_case_hmac(
"sha3-512",
"d2566d156c254ced0101159f97187dbf48d900b8361fa5ebdd7e81409856b1b6a21d93a1fb6e8f700e75620d244ab9e894454030da12d158e9362ffe090d2669"
)
local failed =
pcall(serde.hmac, "a random string" :: any, "input that shouldn't be hashed", "not a secret")
assert(failed == false, "serde.hmac shouldn't allow invalid algorithms passed to it!")
assert(
serde.hmac("sha256", "\0oh no invalid utf-8\127\0\255", SECRET_STRING)
== "1f0d7f65016e9e4c340e3ba23da2483a7dc101ce8a9405f834c23f2e19232c3d",
"serde.hmac should hash invalid UTF-8 just fine"
)

View file

@ -1,13 +1,92 @@
local process = require("@lune/process")
local regex = require("@lune/regex")
local roblox = require("@lune/roblox")
local stdio = require("@lune/stdio")
assert(
stdio.format("Hello", "world", "!") == "Hello world !",
"Format should add a single space between arguments"
local function assertFormatting(errorMessage: string, formatted: string, expected: string)
if formatted ~= expected then
stdio.ewrite(string.format("%s\nExpected: %s\nGot: %s", errorMessage, expected, formatted))
process.exit(1)
end
end
local function assertContains(errorMessage: string, haystack: string, needle: string)
if string.find(haystack, needle) == nil then
stdio.ewrite(string.format("%s\nHaystack: %s\nNeedle: %s", errorMessage, needle, haystack))
process.exit(1)
end
end
assertFormatting(
"Should add a single space between arguments",
stdio.format("Hello", "world", "!"),
"Hello world !"
)
assert(
stdio.format({ Hello = "World" }) == '{\n Hello = "World",\n}',
"Format should print out proper tables"
assertFormatting(
"Should format tables in a sorted manner",
stdio.format({ A = "A", B = "B", C = "C" }),
'{\n A = "A",\n B = "B",\n C = "C",\n}'
)
assertFormatting(
"Should format tables properly with single values",
stdio.format({ Hello = "World" }),
'{\n Hello = "World",\n}'
)
assertFormatting(
"Should format tables properly with multiple values",
stdio.format({ Hello = "World", Hello2 = "Value" }),
'{\n Hello = "World",\n Hello2 = "Value",\n}'
)
assertFormatting(
"Should simplify array-like tables and not format keys",
stdio.format({ "Hello", "World" }),
'{\n "Hello",\n "World",\n}'
)
assertFormatting(
"Should still format numeric keys for mixed tables",
stdio.format({ "Hello", "World", Hello = "World" }),
'{\n [1] = "Hello",\n [2] = "World",\n Hello = "World",\n}'
)
local userdatas = {
Foo = newproxy(false),
Bar = regex.new("TEST"),
Baz = (roblox :: any).Vector3.new(1, 2, 3),
}
assertFormatting(
"Should format userdatas as generic 'userdata' if unknown",
stdio.format(userdatas.Foo),
"<userdata>"
)
assertContains(
"Should format userdatas with their type if they have a __type metafield",
stdio.format(userdatas.Bar),
"Regex"
)
assertContains(
"Should format userdatas with their type even if they have a __tostring metamethod",
stdio.format(userdatas.Baz),
"Vector3"
)
assertContains(
"Should format userdatas with their tostringed value if they have a __tostring metamethod",
stdio.format(userdatas.Baz),
"1, 2, 3"
)
assertFormatting(
"Should format userdatas properly in tables",
stdio.format(userdatas),
"{\n Bar = <Regex(TEST)>,\n Baz = <Vector3(1, 2, 3)>,\n Foo = <userdata>,\n}"
)
local nested = {
@ -22,7 +101,24 @@ local nested = {
},
}
assert(
string.find(stdio.format(nested), "Nesting = { ... }", 1, true) ~= nil,
"Format should print 4 levels of nested tables before cutting off"
assertContains(
"Should print 4 levels of nested tables before cutting off",
stdio.format(nested),
"Nesting = { ... }"
)
local _, errorMessage = pcall(function()
local function innerInnerFn()
process.spawn("PROGRAM_THAT_DOES_NOT_EXIST")
end
local function innerFn()
innerInnerFn()
end
innerFn()
end)
stdio.ewrite(typeof(errorMessage))
assertContains("Should format errors similarly to userdata", stdio.format(errorMessage), "<LuaErr")
assertContains("Should format errors with stack begins", stdio.format(errorMessage), "Stack Begin")
assertContains("Should format errors with stack ends", stdio.format(errorMessage), "Stack End")

View file

@ -27,11 +27,15 @@ export type CompileOptions = {
This is a dictionary that may contain one or more of the following values:
* `debugName` - The debug name of the closure. Defaults to `luau.load(...)`.
* `environment` - Environment values to set and/or override. Includes default globals unless overwritten.
* `environment` - A custom environment to load the chunk in. Setting a custom environment will deoptimize the chunk and forcefully disable codegen. Defaults to the global environment.
* `injectGlobals` - Whether or not to inject globals in the custom environment. Has no effect if no custom environment is provided. Defaults to `true`.
* `codegenEnabled` - Whether or not to enable codegen. Defaults to `false`.
]=]
export type LoadOptions = {
debugName: string?,
environment: { [string]: any }?,
injectGlobals: boolean?,
codegenEnabled: boolean?,
}
--[=[

View file

@ -2,6 +2,19 @@ export type EncodeDecodeFormat = "json" | "yaml" | "toml"
export type CompressDecompressFormat = "brotli" | "gzip" | "lz4" | "zlib"
export type HashAlgorithm =
"md5"
| "sha1"
| "sha224"
| "sha256"
| "sha384"
| "sha512"
| "sha3-224"
| "sha3-256"
| "sha3-384"
| "sha3-512"
| "blake3"
--[=[
@class Serde
@ -120,4 +133,16 @@ function serde.decompress(format: CompressDecompressFormat, s: buffer | string):
return nil :: any
end
function serde.hash(algorithm: HashAlgorithm, message: string | buffer): string
return nil :: any
end
function serde.hmac(
algorithm: HashAlgorithm,
message: string | buffer,
secret: string | buffer
): string
return nil :: any
end
return serde