Migrate to main lune repository

This commit is contained in:
Filip Tibell 2024-06-05 16:47:02 +02:00
parent 3ca27fa51c
commit d5ec279b81
No known key found for this signature in database
36 changed files with 2 additions and 3752 deletions


@@ -1,15 +0,0 @@
root = true
[*]
charset = utf-8
end_of_line = lf
trim_trailing_whitespace = true
insert_final_newline = true
[*.{json,jsonc,json5}]
indent_style = space
indent_size = 4
[*.{yml,yaml}]
indent_style = space
indent_size = 2

9
.gitattributes vendored

@@ -1,9 +0,0 @@
* text=auto
# Temporarily highlight luau as normal lua files
# until we get native linguist support for Luau
*.luau linguist-language=Lua
# Ensure all lua files use LF
*.lua eol=lf
*.luau eol=lf


@@ -1,75 +0,0 @@
name: CI
on:
push:
pull_request:
workflow_dispatch:
defaults:
run:
shell: bash
jobs:
fmt:
name: Check formatting
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt
- name: Check Formatting
run: cargo fmt --check
ci:
needs: ["fmt"]
strategy:
fail-fast: false
matrix:
include:
- name: Windows x86_64
runner-os: windows-latest
cargo-target: x86_64-pc-windows-msvc
- name: Linux x86_64
runner-os: ubuntu-latest
cargo-target: x86_64-unknown-linux-gnu
- name: macOS x86_64
runner-os: macos-latest
cargo-target: x86_64-apple-darwin
name: CI - ${{ matrix.name }}
runs-on: ${{ matrix.runner-os }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
components: clippy
targets: ${{ matrix.cargo-target }}
- name: Build
run: |
cargo build \
--locked --all-features \
--target ${{ matrix.cargo-target }}
- name: Lint
run: |
cargo clippy \
--locked --all-features \
--target ${{ matrix.cargo-target }}
- name: Test
run: |
cargo test \
--locked --all-features \
--target ${{ matrix.cargo-target }}

1
.gitignore vendored

@@ -1 +0,0 @@
/target


@@ -1,8 +0,0 @@
{
"recommendations": [
"rust-lang.rust-analyzer",
"esbenp.prettier-vscode",
"JohnnyMorganz.stylua",
"DavidAnson.vscode-markdownlint"
]
}

17
.vscode/settings.json vendored

@@ -1,17 +0,0 @@
{
"luau-lsp.sourcemap.enabled": false,
"luau-lsp.types.roblox": false,
"rust-analyzer.check.command": "clippy",
"editor.formatOnSave": true,
"stylua.searchParentDirectories": true,
"prettier.tabWidth": 2,
"[luau][lua]": {
"editor.defaultFormatter": "JohnnyMorganz.stylua"
},
"[json][jsonc][markdown][yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[rust]": {
"editor.defaultFormatter": "rust-lang.rust-analyzer"
}
}


@@ -1,19 +0,0 @@
<!-- markdownlint-disable MD023 -->
<!-- markdownlint-disable MD033 -->
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## `0.0.2` - March 11th, 2024
### Changed
- Upgraded `mlua` version to `0.9.6` for more `ThreadId` optimizations
## `0.0.1` - February 16th, 2024
Initial release

952
Cargo.lock generated

@@ -1,952 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
dependencies = [
"memchr",
]
[[package]]
name = "async-channel"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ca33f4bc4ed1babef42cad36cc1f51fa88be00420404e5b1e80ab1b18f7678c"
dependencies = [
"concurrent-queue",
"event-listener",
"event-listener-strategy",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "async-executor"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17ae5ebefcc48e7452b4987947920dac9450be1110cadf34d1b8c116bdbaf97c"
dependencies = [
"async-lock",
"async-task",
"concurrent-queue",
"fastrand",
"futures-lite",
"slab",
]
[[package]]
name = "async-fs"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd1f344136bad34df1f83a47f3fd7f2ab85d75cb8a940af4ccf6d482a84ea01b"
dependencies = [
"async-lock",
"blocking",
"futures-lite",
]
[[package]]
name = "async-io"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb41eb19024a91746eba0773aa5e16036045bbf45733766661099e182ea6a744"
dependencies = [
"async-lock",
"cfg-if",
"concurrent-queue",
"futures-io",
"futures-lite",
"parking",
"polling",
"rustix",
"slab",
"tracing",
"windows-sys 0.52.0",
]
[[package]]
name = "async-lock"
version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b"
dependencies = [
"event-listener",
"event-listener-strategy",
"pin-project-lite",
]
[[package]]
name = "async-task"
version = "4.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799"
[[package]]
name = "atomic-waker"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "bitflags"
version = "2.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf"
[[package]]
name = "blocking"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118"
dependencies = [
"async-channel",
"async-lock",
"async-task",
"fastrand",
"futures-io",
"futures-lite",
"piper",
"tracing",
]
[[package]]
name = "bstr"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc"
dependencies = [
"memchr",
"serde",
]
[[package]]
name = "cc"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
dependencies = [
"libc",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "concurrent-queue"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "convert_case"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
name = "crossbeam-utils"
version = "0.8.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
[[package]]
name = "derive_more"
version = "0.99.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
"convert_case",
"proc-macro2",
"quote",
"rustc_version",
"syn 1.0.109",
]
[[package]]
name = "erased-serde"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55d05712b2d8d88102bc9868020c9e5c7a1f5527c452b9b97450a1d006140ba7"
dependencies = [
"serde",
]
[[package]]
name = "errno"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "event-listener"
version = "4.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e"
dependencies = [
"concurrent-queue",
"parking",
"pin-project-lite",
]
[[package]]
name = "event-listener-strategy"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3"
dependencies = [
"event-listener",
"pin-project-lite",
]
[[package]]
name = "fastrand"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
[[package]]
name = "futures-core"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
[[package]]
name = "futures-io"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
[[package]]
name = "futures-lite"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "445ba825b27408685aaecefd65178908c36c6e96aaf6d8599419d46e624192ba"
dependencies = [
"fastrand",
"futures-core",
"futures-io",
"parking",
"pin-project-lite",
]
[[package]]
name = "futures-task"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004"
[[package]]
name = "futures-util"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
dependencies = [
"futures-core",
"futures-task",
"pin-project-lite",
"pin-utils",
"slab",
]
[[package]]
name = "generator"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cc16584ff22b460a382b7feec54b23d2908d858152e5739a120b949293bd74e"
dependencies = [
"cc",
"libc",
"log",
"rustversion",
"windows",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7"
[[package]]
name = "libloading"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161"
dependencies = [
"cfg-if",
"windows-sys 0.48.0",
]
[[package]]
name = "linux-raw-sys"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
[[package]]
name = "log"
version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
[[package]]
name = "loom"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e045d70ddfbc984eacfa964ded019534e8f6cbf36f6410aee0ed5cefa5a9175"
dependencies = [
"cfg-if",
"generator",
"scoped-tls",
"tracing",
"tracing-subscriber",
]
[[package]]
name = "luau0-src"
version = "0.8.0+luau609"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c581e2f71ce092ff3d1c8e206bb7b9dc415e28d64e8fbe2d320ca4f35cc0f054"
dependencies = [
"cc",
]
[[package]]
name = "matchers"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
dependencies = [
"regex-automata 0.1.10",
]
[[package]]
name = "memchr"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
[[package]]
name = "mlua"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "868d02cb5eb97761bbf6bd6922c1c7a88b8ea252bbf43bd8350a0bf8497a1fc0"
dependencies = [
"bstr",
"erased-serde",
"futures-util",
"libloading",
"mlua-sys",
"num-traits",
"once_cell",
"rustc-hash",
"serde",
"serde-value",
]
[[package]]
name = "mlua-luau-scheduler"
version = "0.0.2"
dependencies = [
"async-executor",
"async-fs",
"async-io",
"blocking",
"concurrent-queue",
"derive_more",
"event-listener",
"futures-lite",
"mlua",
"rustc-hash",
"tracing",
"tracing-subscriber",
"tracing-tracy",
]
[[package]]
name = "mlua-sys"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2847b42764435201d8cbee1f517edb79c4cca4181877b90047587c89e1b7bce4"
dependencies = [
"cc",
"cfg-if",
"luau0-src",
"pkg-config",
]
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
dependencies = [
"overload",
"winapi",
]
[[package]]
name = "num-traits"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
dependencies = [
"autocfg",
]
[[package]]
name = "once_cell"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "ordered-float"
version = "2.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c"
dependencies = [
"num-traits",
]
[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "parking"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae"
[[package]]
name = "pin-project-lite"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
[[package]]
name = "pin-utils"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "piper"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4"
dependencies = [
"atomic-waker",
"fastrand",
"futures-io",
]
[[package]]
name = "pkg-config"
version = "0.3.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb"
[[package]]
name = "polling"
version = "3.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "545c980a3880efd47b2e262f6a4bb6daad6555cf3367aa9c4e52895f69537a41"
dependencies = [
"cfg-if",
"concurrent-queue",
"pin-project-lite",
"rustix",
"tracing",
"windows-sys 0.52.0",
]
[[package]]
name = "proc-macro2"
version = "1.0.78"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata 0.4.5",
"regex-syntax 0.8.2",
]
[[package]]
name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
dependencies = [
"regex-syntax 0.6.29",
]
[[package]]
name = "regex-automata"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.8.2",
]
[[package]]
name = "regex-syntax"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver",
]
[[package]]
name = "rustix"
version = "0.38.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca"
dependencies = [
"bitflags",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.52.0",
]
[[package]]
name = "rustversion"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
[[package]]
name = "scoped-tls"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
[[package]]
name = "semver"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0"
[[package]]
name = "serde"
version = "1.0.195"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde-value"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c"
dependencies = [
"ordered-float",
"serde",
]
[[package]]
name = "serde_derive"
version = "1.0.195"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.48",
]
[[package]]
name = "sharded-slab"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
dependencies = [
"lazy_static",
]
[[package]]
name = "slab"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
dependencies = [
"autocfg",
]
[[package]]
name = "smallvec"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thread_local"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
dependencies = [
"cfg-if",
"once_cell",
]
[[package]]
name = "tracing"
version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
dependencies = [
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.48",
]
[[package]]
name = "tracing-core"
version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
dependencies = [
"once_cell",
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
dependencies = [
"log",
"once_cell",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
]
[[package]]
name = "tracing-tracy"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6024d04f84a69fd0d1dc1eee3a2b070bd246530a0582f9982ae487cb6c703614"
dependencies = [
"tracing-core",
"tracing-subscriber",
"tracy-client",
]
[[package]]
name = "tracy-client"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59fb931a64ff88984f86d3e9bcd1ae8843aa7fe44dd0f8097527bc172351741d"
dependencies = [
"loom",
"once_cell",
"tracy-client-sys",
]
[[package]]
name = "tracy-client-sys"
version = "0.22.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d104d610dfa9dd154535102cc9c6164ae1fa37842bc2d9e83f9ac82b0ae0882"
dependencies = [
"cc",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "valuable"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f"
dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.0",
]
[[package]]
name = "windows-targets"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
"windows_aarch64_gnullvm 0.48.5",
"windows_aarch64_msvc 0.48.5",
"windows_i686_gnu 0.48.5",
"windows_i686_msvc 0.48.5",
"windows_x86_64_gnu 0.48.5",
"windows_x86_64_gnullvm 0.48.5",
"windows_x86_64_msvc 0.48.5",
]
[[package]]
name = "windows-targets"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
dependencies = [
"windows_aarch64_gnullvm 0.52.0",
"windows_aarch64_msvc 0.52.0",
"windows_i686_gnu 0.52.0",
"windows_i686_msvc 0.52.0",
"windows_x86_64_gnu 0.52.0",
"windows_x86_64_gnullvm 0.52.0",
"windows_x86_64_msvc 0.52.0",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
[[package]]
name = "windows_i686_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnu"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
[[package]]
name = "windows_i686_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_i686_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"


@@ -1,69 +0,0 @@
[package]
name = "mlua-luau-scheduler"
version = "0.0.2"
edition = "2021"
license = "MPL-2.0"
repository = "https://github.com/lune-org/mlua-luau-scheduler"
description = "Luau-based async scheduler, using mlua and async-executor"
readme = "README.md"
keywords = ["async", "luau", "scheduler"]
categories = ["async"]
[dependencies]
async-executor = "1.8"
blocking = "1.5"
concurrent-queue = "2.4"
derive_more = "0.99"
event-listener = "4.0"
futures-lite = "2.2"
rustc-hash = "1.1"
tracing = "0.1"
mlua = { version = "0.9.6", features = [
"luau",
"luau-jit",
"async",
"serialize",
] }
[dev-dependencies]
async-fs = "2.1"
async-io = "2.3"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tracing-tracy = "0.11"
[lints.clippy]
all = { level = "deny", priority = -3 }
cargo = { level = "warn", priority = -2 }
pedantic = { level = "warn", priority = -1 }
[lib]
path = "lib/lib.rs"
[[example]]
name = "basic_sleep"
test = true
[[example]]
name = "basic_spawn"
test = true
[[example]]
name = "callbacks"
test = true
[[example]]
name = "exit_code"
test = true
[[example]]
name = "lots_of_threads"
test = true
[[example]]
name = "scheduler_ordering"
test = true
[[example]]
name = "tracy"
test = false


@@ -1,373 +0,0 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at https://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.


@@ -1,91 +1,3 @@
<!-- markdownlint-disable MD033 -->
<!-- markdownlint-disable MD041 -->
# REPOSITORY HAS MOVED
<h1 align="center">mlua-luau-scheduler</h1>
<div align="center">
<div>
<a href="https://github.com/lune-org/mlua-luau-scheduler/actions">
<img src="https://shields.io/endpoint?url=https://badges.readysetplay.io/workflow/lune-org/mlua-luau-scheduler/ci.yaml" alt="CI status" />
</a>
<a href="https://github.com/lune-org/mlua-luau-scheduler/blob/main/LICENSE.txt">
<img src="https://img.shields.io/github/license/lune-org/mlua-luau-scheduler.svg?label=License&color=informational" alt="Crate license" />
</a>
</div>
</div>
<br/>
An async scheduler for Luau, using [`mlua`][mlua] and built on top of [`async-executor`][async-executor].
This crate is runtime-agnostic and compatible with any async runtime, including [Tokio][tokio], [smol][smol], [async-std][async-std], and others. <br/>
However, since many of its dependencies are shared with [smol][smol], using smol rather than another runtime may be preferable.
[async-executor]: https://crates.io/crates/async-executor
[async-std]: https://async.rs
[mlua]: https://crates.io/crates/mlua
[smol]: https://github.com/smol-rs/smol
[tokio]: https://tokio.rs
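As a minimal illustration of the runtime-agnostic claim above (a sketch only, assuming a `lua` state and `sched` scheduler already set up as in the example below), the scheduler's `run` future can be driven by any executor's `block_on`, for instance the `futures-lite` crate that this project already depends on:

```rs
// Hypothetical variant of the example below: drive the scheduler with
// futures-lite instead of async-io. Any executor that can block on a
// future works the same way, since the scheduler only exposes futures.
futures_lite::future::block_on(sched.run());
```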
## Example Usage
### 1. Import dependencies
```rs
use std::time::{Duration, Instant};
use std::io::ErrorKind;
use async_io::{block_on, Timer};
use async_fs::read_to_string;
use mlua::prelude::*;
use mlua_luau_scheduler::*;
```
### 2. Set up Lua environment
```rs
let lua = Lua::new();
lua.globals().set(
"sleep",
lua.create_async_function(|_, duration: f64| async move {
let before = Instant::now();
let after = Timer::after(Duration::from_secs_f64(duration)).await;
Ok((after - before).as_secs_f64())
})?,
)?;
lua.globals().set(
"readFile",
lua.create_async_function(|lua, path: String| async move {
// Spawn background task that does not take up resources on the Lua thread
// Normally, futures in mlua cannot be shared across threads, but this can
let task = lua.spawn(async move {
match read_to_string(path).await {
Ok(s) => Ok(Some(s)),
Err(e) if e.kind() == ErrorKind::NotFound => Ok(None),
Err(e) => Err(e),
}
});
task.await.into_lua_err()
})?,
)?;
```
### 3. Set up scheduler, run threads
```rs
let sched = Scheduler::new(&lua);
// We can create multiple Lua threads ...
let sleepThread = lua.load("sleep(0.1)");
let fileThread = lua.load("readFile(\"Cargo.toml\")");
// ... spawn them both onto the scheduler ...
sched.push_thread_front(sleepThread, ())?;
sched.push_thread_front(fileThread, ())?;
// ... and run until they finish
block_on(sched.run());
```
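The `callbacks` and `scheduler_ordering` examples in this repository also show how to read a result back out: `push_thread_front` returns a thread id that can later be passed to `get_thread_result` once `run` has completed. As a hedged sketch (reusing the names from step 3 and the return types seen in those examples, not a separately documented API), the tail of the snippet above could be written as:

```rs
// Keep the id returned when scheduling the file-reading thread ...
let file_id = sched.push_thread_front(fileThread, ())?;
// ... run the scheduler to completion ...
block_on(sched.run());
// ... then look up the thread's result afterwards; `get_thread_result`
// returns `None` if the thread never produced a value.
if let Some(result) = sched.get_thread_result(file_id) {
    let contents = Option::<String>::from_lua_multi(result?, &lua)?;
    println!("Cargo.toml contents: {contents:?}");
}
```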
`mlua-luau-scheduler` has moved to the [main Lune repository](https://github.com/lune-org/lune).


@@ -1,44 +0,0 @@
#![allow(clippy::missing_errors_doc)]
use std::time::{Duration, Instant};
use async_io::{block_on, Timer};
use mlua::prelude::*;
use mlua_luau_scheduler::Scheduler;
const MAIN_SCRIPT: &str = include_str!("./lua/basic_sleep.luau");
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
lua.globals().set(
"sleep",
lua.create_async_function(|_, duration: f64| async move {
let before = Instant::now();
let after = Timer::after(Duration::from_secs_f64(duration)).await;
Ok((after - before).as_secs_f64())
})?,
)?;
// Load the main script into a scheduler
let sched = Scheduler::new(&lua);
let main = lua.load(MAIN_SCRIPT);
sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
Ok(())
}
#[test]
fn test_basic_sleep() -> LuaResult<()> {
main()
}


@@ -1,63 +0,0 @@
#![allow(clippy::missing_errors_doc)]
use std::io::ErrorKind;
use async_fs::read_to_string;
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::{LuaSpawnExt, Scheduler};
const MAIN_SCRIPT: &str = include_str!("./lua/basic_spawn.luau");
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
lua.globals().set(
"readFile",
lua.create_async_function(|lua, path: String| async move {
// Spawn background task that does not take up resources on the Lua thread
let task = lua.spawn(async move {
match read_to_string(path).await {
Ok(s) => Ok(Some(s)),
Err(e) if e.kind() == ErrorKind::NotFound => Ok(None),
Err(e) => Err(e),
}
});
// Wait for it to complete
let result = task.await.into_lua_err();
// We can also spawn local tasks that do take up resources
// on the Lua thread, but that do not have the Send bound
if result.is_ok() {
lua.spawn_local(async move {
println!("File read successfully!");
});
}
result
})?,
)?;
// Load the main script into a scheduler
let sched = Scheduler::new(&lua);
let main = lua.load(MAIN_SCRIPT);
sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
Ok(())
}
#[test]
fn test_basic_spawn() -> LuaResult<()> {
main()
}


@@ -1,47 +0,0 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::missing_panics_doc)]
use mlua::prelude::*;
use mlua_luau_scheduler::Scheduler;
use async_io::block_on;
const MAIN_SCRIPT: &str = include_str!("./lua/callbacks.luau");
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
// Create a new scheduler with custom callbacks
let sched = Scheduler::new(&lua);
sched.set_error_callback(|e| {
println!(
"Captured error from Lua!\n{}\n{e}\n{}",
"-".repeat(15),
"-".repeat(15)
);
});
// Load the main script into the scheduler, and keep track of the thread we spawn
let main = lua.load(MAIN_SCRIPT);
let id = sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
// We should have gotten the error back from our script
assert!(sched.get_thread_result(id).unwrap().is_err());
Ok(())
}
#[test]
fn test_callbacks() -> LuaResult<()> {
main()
}


@@ -1,42 +0,0 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::missing_panics_doc)]
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};
const MAIN_SCRIPT: &str = include_str!("./lua/exit_code.luau");
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
let sched = Scheduler::new(&lua);
let fns = Functions::new(&lua)?;
lua.globals().set("exit", fns.exit)?;
// Load the main script into the scheduler
let main = lua.load(MAIN_SCRIPT);
sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
// Verify that we got a correct exit code
let code = sched.get_exit_code().unwrap_or_default();
assert!(format!("{code:?}").contains("(1)"));
Ok(())
}
#[test]
fn test_exit_code() -> LuaResult<()> {
main()
}


@@ -1,50 +0,0 @@
#![allow(clippy::missing_errors_doc)]
use std::time::Duration;
use async_io::{block_on, Timer};
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};
const MAIN_SCRIPT: &str = include_str!("./lua/lots_of_threads.luau");
const ONE_NANOSECOND: Duration = Duration::from_nanos(1);
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
let sched = Scheduler::new(&lua);
let fns = Functions::new(&lua)?;
lua.globals().set("spawn", fns.spawn)?;
lua.globals().set(
"sleep",
lua.create_async_function(|_, ()| async move {
// Obviously we can't sleep for a single nanosecond since
// this uses OS scheduling under the hood, but we can try
Timer::after(ONE_NANOSECOND).await;
Ok(())
})?,
)?;
// Load the main script into the scheduler
let main = lua.load(MAIN_SCRIPT);
sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
Ok(())
}
#[test]
fn test_lots_of_threads() -> LuaResult<()> {
main()
}


@@ -1,13 +0,0 @@
--!nocheck
--!nolint UnknownGlobal
print("Sleeping for 3 seconds...")
sleep(1)
print("1 second passed")
sleep(1)
print("2 seconds passed")
sleep(1)
print("3 seconds passed")


@@ -1,17 +0,0 @@
--!nocheck
--!nolint UnknownGlobal
local _, err = pcall(function()
local file = readFile("Cargo.toml")
if file ~= nil then
print("Cargo.toml found!")
print("Contents:")
print(file)
else
print("Cargo.toml not found!")
end
end)
if err ~= nil then
print("Error while reading file: " .. err)
end


@@ -1,4 +0,0 @@
--!nocheck
--!nolint UnknownGlobal
error("Oh no! Something went very very wrong!")


@@ -1,8 +0,0 @@
--!nocheck
--!nolint UnknownGlobal
print("Setting exit code manually")
exit(1)
error("unreachable")


@@ -1,29 +0,0 @@
--!nocheck
--!nolint UnknownGlobal
local NUM_BATCHES = 10
local NUM_THREADS = 100_000
print(`Spawning {NUM_BATCHES * NUM_THREADS} threads split into {NUM_BATCHES} batches\n`)
local before = os.clock()
for i = 1, NUM_BATCHES do
print(`Batch {i} of {NUM_BATCHES}`)
local thread = coroutine.running()
local counter = 0
for j = 1, NUM_THREADS do
spawn(function()
sleep(0.1)
counter += 1
if counter == NUM_THREADS then
spawn(thread)
end
end)
end
coroutine.yield()
end
local after = os.clock()
print(`\nSpawned {NUM_BATCHES * NUM_THREADS} sleeping threads in {after - before}s`)


@@ -1,34 +0,0 @@
--!nocheck
--!nolint UnknownGlobal
local nums = {}
local function insert(n: number)
table.insert(nums, n)
print(n)
end
insert(1)
-- Defer will run at the end of the resumption cycle, but without yielding
defer(function()
insert(5)
end)
-- Spawn will instantly run up until the first yield, and must then be resumed manually ...
spawn(function()
insert(2)
coroutine.yield()
error("unreachable code")
end)
-- ... unless calling functions created using `lua.create_async_function(...)`,
-- which will resume their calling thread with their result automatically
spawn(function()
insert(3)
sleep(1)
insert(6)
end)
insert(4)
return nums


@@ -1,55 +0,0 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::missing_panics_doc)]
use std::time::{Duration, Instant};
use async_io::{block_on, Timer};
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};
const MAIN_SCRIPT: &str = include_str!("./lua/scheduler_ordering.luau");
pub fn main() -> LuaResult<()> {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_target(false)
.without_time()
.init();
// Set up persistent Lua environment
let lua = Lua::new();
let sched = Scheduler::new(&lua);
let fns = Functions::new(&lua)?;
lua.globals().set("spawn", fns.spawn)?;
lua.globals().set("defer", fns.defer)?;
lua.globals().set(
"sleep",
lua.create_async_function(|_, duration: Option<f64>| async move {
let duration = duration.unwrap_or_default().max(1.0 / 250.0);
let before = Instant::now();
let after = Timer::after(Duration::from_secs_f64(duration)).await;
Ok((after - before).as_secs_f64())
})?,
)?;
// Load the main script into the scheduler, and keep track of the thread we spawn
let main = lua.load(MAIN_SCRIPT);
let id = sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
// We should have gotten proper values back from our script
let res = sched.get_thread_result(id).unwrap().unwrap();
let nums = Vec::<usize>::from_lua_multi(res, &lua)?;
assert_eq!(nums, vec![1, 2, 3, 4, 5, 6]);
Ok(())
}
#[test]
fn test_scheduler_ordering() -> LuaResult<()> {
main()
}


@@ -1,60 +0,0 @@
/*
NOTE: This example is the same as "lots_of_threads", but with tracy set up for performance profiling.
How to run:
1. Install tracy
- Follow the instructions at https://github.com/wolfpld/tracy
- Or install via something like homebrew: `brew install tracy`
2. Run the server (`tracy`) in a terminal
3. Run the example in another terminal
- `export RUST_LOG=trace`
- `cargo run --example tracy`
*/
#![allow(clippy::missing_errors_doc)]
use std::time::Duration;
use async_io::{block_on, Timer};
use tracing_subscriber::layer::SubscriberExt;
use tracing_tracy::{client::Client as TracyClient, TracyLayer};
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};
const MAIN_SCRIPT: &str = include_str!("./lua/lots_of_threads.luau");
const ONE_NANOSECOND: Duration = Duration::from_nanos(1);
pub fn main() -> LuaResult<()> {
let _client = TracyClient::start();
let _ = tracing::subscriber::set_global_default(
tracing_subscriber::registry().with(TracyLayer::default()),
);
// Set up persistent Lua environment
let lua = Lua::new();
let sched = Scheduler::new(&lua);
let fns = Functions::new(&lua)?;
lua.globals().set("spawn", fns.spawn)?;
lua.globals().set(
"sleep",
lua.create_async_function(|_, ()| async move {
// Obviously we can't sleep for a single nanosecond since
// this uses OS scheduling under the hood, but we can try
Timer::after(ONE_NANOSECOND).await;
Ok(())
})?,
)?;
// Load the main script into the scheduler
let main = lua.load(MAIN_SCRIPT);
sched.push_thread_front(main, ())?;
// Run until completion
block_on(sched.run());
Ok(())
}

View file

@ -1,45 +0,0 @@
use std::{cell::RefCell, rc::Rc};
use mlua::prelude::*;
type ErrorCallback = Box<dyn Fn(LuaError) + Send + 'static>;
#[derive(Clone)]
pub(crate) struct ThreadErrorCallback {
inner: Rc<RefCell<Option<ErrorCallback>>>,
}
impl ThreadErrorCallback {
pub fn new() -> Self {
Self {
inner: Rc::new(RefCell::new(None)),
}
}
pub fn replace(&self, callback: impl Fn(LuaError) + Send + 'static) {
self.inner.borrow_mut().replace(Box::new(callback));
}
pub fn clear(&self) {
self.inner.borrow_mut().take();
}
pub fn call(&self, error: &LuaError) {
if let Some(cb) = &*self.inner.borrow() {
cb(error.clone());
}
}
}
#[allow(clippy::needless_pass_by_value)]
fn default_error_callback(e: LuaError) {
eprintln!("{e}");
}
impl Default for ThreadErrorCallback {
fn default() -> Self {
let this = Self::new();
this.replace(default_error_callback);
this
}
}
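
`ThreadErrorCallback` is the storage behind `Scheduler::set_error_callback`; by default it prints to stderr, and replacing it swaps the handler for every Lua thread error. A minimal sketch of overriding it from the consumer side, assuming only the public API shown elsewhere in this commit (`Scheduler`, `set_error_callback`, `push_thread_front`):

```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::Scheduler;

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    let sched = Scheduler::new(&lua);

    // Replace the default stderr handler; the callback runs once
    // for every Lua thread that errors while the scheduler runs.
    sched.set_error_callback(|e| {
        eprintln!("lua thread failed: {e}");
    });

    // This thread errors immediately, so the callback above fires once.
    sched.push_thread_front(lua.load(r#"error("oh no")"#), ())?;
    block_on(sched.run());
    Ok(())
}
```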

View file

@ -1,31 +0,0 @@
use std::{cell::Cell, process::ExitCode, rc::Rc};
use event_listener::Event;
#[derive(Debug, Clone)]
pub(crate) struct Exit {
code: Rc<Cell<Option<ExitCode>>>,
event: Rc<Event>,
}
impl Exit {
pub fn new() -> Self {
Self {
code: Rc::new(Cell::new(None)),
event: Rc::new(Event::new()),
}
}
pub fn set(&self, code: ExitCode) {
self.code.set(Some(code));
self.event.notify(usize::MAX);
}
pub fn get(&self) -> Option<ExitCode> {
self.code.get()
}
pub async fn listen(&self) {
self.event.listen().await;
}
}
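
`Exit` is the app-data handle behind `set_exit_code` / `get_exit_code`; setting it notifies the event that `Scheduler::run` listens on, which is how `exit(...)` stops the loop early. A hedged sketch of propagating that code to the process exit status, using only names from the public API shown in this commit:

```rust
use std::process::ExitCode;

use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};

fn main() -> LuaResult<ExitCode> {
    let lua = Lua::new();
    let sched = Scheduler::new(&lua);

    // Expose the scheduler-aware `exit` global; calling it stores an exit
    // code and makes `Scheduler::run` return as soon as possible.
    let fns = Functions::new(&lua)?;
    lua.globals().set("exit", fns.exit)?;

    sched.push_thread_front(lua.load("exit(1)"), ())?;
    block_on(sched.run());

    // Fall back to success if no thread ever called `exit`.
    Ok(sched.get_exit_code().unwrap_or(ExitCode::SUCCESS))
}
```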

View file

@ -1,283 +0,0 @@
#![allow(unused_imports)]
#![allow(clippy::too_many_lines)]
use std::process::ExitCode;
use mlua::prelude::*;
use crate::{
error_callback::ThreadErrorCallback,
queue::{DeferredThreadQueue, SpawnedThreadQueue},
result_map::ThreadResultMap,
scheduler::Scheduler,
thread_id::ThreadId,
traits::LuaSchedulerExt,
util::{is_poll_pending, LuaThreadOrFunction, ThreadResult},
};
const ERR_METADATA_NOT_ATTACHED: &str = "\
Lua state does not have scheduler metadata attached!\
\nThis is most likely caused by creating functions outside of a scheduler.\
\nScheduler functions must always be created from within an active scheduler.\
";
const EXIT_IMPL_LUA: &str = r"
exit(...)
yield()
";
const WRAP_IMPL_LUA: &str = r"
local t = create(...)
return function(...)
local r = { resume(t, ...) }
if r[1] then
return select(2, unpack(r))
else
error(r[2], 2)
end
end
";
/**
A collection of lua functions that may be called to interact with a [`Scheduler`].
Note that all of these could be implemented using [`LuaSchedulerExt`]; however, this struct
is implemented using internal (non-public) APIs and generally has better performance.
*/
pub struct Functions<'lua> {
/**
Implementation of `coroutine.resume` that handles async polling properly.
Defers onto the scheduler queue if the thread calls an async function.
*/
pub resume: LuaFunction<'lua>,
/**
Implementation of `coroutine.wrap` that handles async polling properly.
Defers onto the scheduler queue if the thread calls an async function.
*/
pub wrap: LuaFunction<'lua>,
/**
Resumes a function / thread once instantly, and runs until first yield.
Spawns onto the scheduler queue if not completed.
*/
pub spawn: LuaFunction<'lua>,
/**
Defers a function / thread onto the scheduler queue.
Does not resume instantly, only adds to the queue.
*/
pub defer: LuaFunction<'lua>,
/**
Cancels a function / thread, removing it from the queue.
*/
pub cancel: LuaFunction<'lua>,
/**
Exits the scheduler, stopping all other threads and shutting it down.
Yields the calling thread to ensure that it does not continue.
*/
pub exit: LuaFunction<'lua>,
}
impl<'lua> Functions<'lua> {
/**
Creates a new collection of Lua functions that may be called to interact with a [`Scheduler`].
# Errors
Errors when out of memory, or if default Lua globals are missing.
# Panics
Panics when the given [`Lua`] instance does not have an attached [`Scheduler`].
*/
pub fn new(lua: &'lua Lua) -> LuaResult<Self> {
let spawn_queue = lua
.app_data_ref::<SpawnedThreadQueue>()
.expect(ERR_METADATA_NOT_ATTACHED)
.clone();
let defer_queue = lua
.app_data_ref::<DeferredThreadQueue>()
.expect(ERR_METADATA_NOT_ATTACHED)
.clone();
let error_callback = lua
.app_data_ref::<ThreadErrorCallback>()
.expect(ERR_METADATA_NOT_ATTACHED)
.clone();
let result_map = lua
.app_data_ref::<ThreadResultMap>()
.expect(ERR_METADATA_NOT_ATTACHED)
.clone();
let resume_queue = defer_queue.clone();
let resume_map = result_map.clone();
let resume =
lua.create_function(move |lua, (thread, args): (LuaThread, LuaMultiValue)| {
let _span = tracing::trace_span!("Scheduler::fn_resume").entered();
match thread.resume::<_, LuaMultiValue>(args.clone()) {
Ok(v) => {
if v.get(0).map(is_poll_pending).unwrap_or_default() {
// Pending, defer to scheduler and return nil
resume_queue.push_item(lua, &thread, args)?;
(true, LuaValue::Nil).into_lua_multi(lua)
} else {
// Not pending, store the value if thread is done
if thread.status() != LuaThreadStatus::Resumable {
let id = ThreadId::from(&thread);
if resume_map.is_tracked(id) {
let res = ThreadResult::new(Ok(v.clone()), lua);
resume_map.insert(id, res);
}
}
(true, v).into_lua_multi(lua)
}
}
Err(e) => {
// Not pending, store the error
let id = ThreadId::from(&thread);
if resume_map.is_tracked(id) {
let res = ThreadResult::new(Err(e.clone()), lua);
resume_map.insert(id, res);
}
(false, e.to_string()).into_lua_multi(lua)
}
}
})?;
let wrap_env = lua.create_table_from(vec![
("resume", resume.clone()),
("error", lua.globals().get::<_, LuaFunction>("error")?),
("select", lua.globals().get::<_, LuaFunction>("select")?),
("unpack", lua.globals().get::<_, LuaFunction>("unpack")?),
(
"create",
lua.globals()
.get::<_, LuaTable>("coroutine")?
.get::<_, LuaFunction>("create")?,
),
])?;
let wrap = lua
.load(WRAP_IMPL_LUA)
.set_name("=__scheduler_wrap")
.set_environment(wrap_env)
.into_function()?;
let spawn_map = result_map.clone();
let spawn = lua.create_function(
move |lua, (tof, args): (LuaThreadOrFunction, LuaMultiValue)| {
let _span = tracing::trace_span!("Scheduler::fn_spawn").entered();
let thread = tof.into_thread(lua)?;
if thread.status() == LuaThreadStatus::Resumable {
// NOTE: We need to resume the thread once instantly for correct behavior,
// and only if we get the pending value back do we spawn it onto the async executor
match thread.resume::<_, LuaMultiValue>(args.clone()) {
Ok(v) => {
if v.get(0).map(is_poll_pending).unwrap_or_default() {
spawn_queue.push_item(lua, &thread, args)?;
} else {
// Not pending, store the value if thread is done
if thread.status() != LuaThreadStatus::Resumable {
let id = ThreadId::from(&thread);
if spawn_map.is_tracked(id) {
let res = ThreadResult::new(Ok(v), lua);
spawn_map.insert(id, res);
}
}
}
}
Err(e) => {
error_callback.call(&e);
// Not pending, store the error
let id = ThreadId::from(&thread);
if spawn_map.is_tracked(id) {
let res = ThreadResult::new(Err(e), lua);
spawn_map.insert(id, res);
}
}
};
}
Ok(thread)
},
)?;
let defer = lua.create_function(
move |lua, (tof, args): (LuaThreadOrFunction, LuaMultiValue)| {
let _span = tracing::trace_span!("Scheduler::fn_defer").entered();
let thread = tof.into_thread(lua)?;
if thread.status() == LuaThreadStatus::Resumable {
defer_queue.push_item(lua, &thread, args)?;
}
Ok(thread)
},
)?;
let close = lua
.globals()
.get::<_, LuaTable>("coroutine")?
.get::<_, LuaFunction>("close")?;
let close_key = lua.create_registry_value(close)?;
let cancel = lua.create_function(move |lua, thread: LuaThread| {
let _span = tracing::trace_span!("Scheduler::fn_cancel").entered();
let close: LuaFunction = lua.registry_value(&close_key)?;
match close.call(thread) {
Err(LuaError::CoroutineInactive) | Ok(()) => Ok(()),
Err(e) => Err(e),
}
})?;
let exit_env = lua.create_table_from(vec![
(
"exit",
lua.create_function(|lua, code: Option<u8>| {
let _span = tracing::trace_span!("Scheduler::fn_exit").entered();
let code = code.map(ExitCode::from).unwrap_or_default();
lua.set_exit_code(code);
Ok(())
})?,
),
(
"yield",
lua.globals()
.get::<_, LuaTable>("coroutine")?
.get::<_, LuaFunction>("yield")?,
),
])?;
let exit = lua
.load(EXIT_IMPL_LUA)
.set_name("=__scheduler_exit")
.set_environment(exit_env)
.into_function()?;
Ok(Self {
resume,
wrap,
spawn,
defer,
cancel,
exit,
})
}
}
impl Functions<'_> {
/**
Injects [`Scheduler`]-compatible functions into the given [`Lua`] instance.
This will overwrite the following functions:
- `coroutine.resume`
- `coroutine.wrap`
# Errors
Errors when out of memory, or if default Lua globals are missing.
*/
pub fn inject_compat(&self, lua: &Lua) -> LuaResult<()> {
let co: LuaTable = lua.globals().get("coroutine")?;
co.set("resume", self.resume.clone())?;
co.set("wrap", self.wrap.clone())?;
Ok(())
}
}
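
A short sketch of how the `Functions` collection above is typically wired into a Lua environment, including the `inject_compat` override of `coroutine.resume` / `coroutine.wrap`; the Luau snippet is illustrative only, and everything else uses APIs shown in this commit:

```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::{Functions, Scheduler};

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    let sched = Scheduler::new(&lua);

    // Functions::new must run after Scheduler::new, since it reads the
    // queues and result map that the scheduler attaches to the Lua state.
    let fns = Functions::new(&lua)?;
    lua.globals().set("spawn", fns.spawn.clone())?;
    lua.globals().set("defer", fns.defer.clone())?;
    lua.globals().set("cancel", fns.cancel.clone())?;

    // Make coroutine.resume / coroutine.wrap scheduler-aware so that
    // threads calling async functions are polled and resumed correctly.
    fns.inject_compat(&lua)?;

    sched.push_thread_front(
        lua.load(
            r#"
            local thread = spawn(function()
                print("spawned: runs immediately, up to its first yield")
            end)
            defer(function()
                print("deferred: runs at the end of the resumption cycle")
            end)
            cancel(thread)
            "#,
        ),
        (),
    )?;
    block_on(sched.run());
    Ok(())
}
```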

View file

@ -1,16 +0,0 @@
mod error_callback;
mod exit;
mod functions;
mod queue;
mod result_map;
mod scheduler;
mod status;
mod thread_id;
mod traits;
mod util;
pub use functions::Functions;
pub use scheduler::Scheduler;
pub use status::Status;
pub use thread_id::ThreadId;
pub use traits::{IntoLuaThread, LuaSchedulerExt, LuaSpawnExt};

View file

@ -1,139 +0,0 @@
use std::{pin::Pin, rc::Rc};
use concurrent_queue::ConcurrentQueue;
use derive_more::{Deref, DerefMut};
use event_listener::Event;
use futures_lite::{Future, FutureExt};
use mlua::prelude::*;
use crate::{traits::IntoLuaThread, util::ThreadWithArgs, ThreadId};
/**
Queue for storing [`LuaThread`]s with associated arguments.
Provides methods for pushing and draining the queue, as
well as listening for new items being pushed to the queue.
*/
#[derive(Debug, Clone)]
pub(crate) struct ThreadQueue {
queue: Rc<ConcurrentQueue<ThreadWithArgs>>,
event: Rc<Event>,
}
impl ThreadQueue {
pub fn new() -> Self {
let queue = Rc::new(ConcurrentQueue::unbounded());
let event = Rc::new(Event::new());
Self { queue, event }
}
pub fn push_item<'lua>(
&self,
lua: &'lua Lua,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId> {
let thread = thread.into_lua_thread(lua)?;
let args = args.into_lua_multi(lua)?;
tracing::trace!("pushing item to queue with {} args", args.len());
let id = ThreadId::from(&thread);
let stored = ThreadWithArgs::new(lua, thread, args)?;
self.queue.push(stored).into_lua_err()?;
self.event.notify(usize::MAX);
Ok(id)
}
#[inline]
pub fn drain_items<'outer, 'lua>(
&'outer self,
lua: &'lua Lua,
) -> impl Iterator<Item = (LuaThread<'lua>, LuaMultiValue<'lua>)> + 'outer
where
'lua: 'outer,
{
self.queue.try_iter().map(|stored| stored.into_inner(lua))
}
#[inline]
pub async fn wait_for_item(&self) {
if self.queue.is_empty() {
let listener = self.event.listen();
// NOTE: Need to check again, we could have gotten
// new queued items while creating our listener
if self.queue.is_empty() {
listener.await;
}
}
}
#[inline]
pub fn is_empty(&self) -> bool {
self.queue.is_empty()
}
}
/**
Alias for [`ThreadQueue`], providing a newtype to store in Lua app data.
*/
#[derive(Debug, Clone, Deref, DerefMut)]
pub(crate) struct SpawnedThreadQueue(ThreadQueue);
impl SpawnedThreadQueue {
pub fn new() -> Self {
Self(ThreadQueue::new())
}
}
/**
Alias for [`ThreadQueue`], providing a newtype to store in Lua app data.
*/
#[derive(Debug, Clone, Deref, DerefMut)]
pub(crate) struct DeferredThreadQueue(ThreadQueue);
impl DeferredThreadQueue {
pub fn new() -> Self {
Self(ThreadQueue::new())
}
}
pub type LocalBoxFuture<'fut> = Pin<Box<dyn Future<Output = ()> + 'fut>>;
/**
Queue for storing local futures.
Provides methods for pushing and draining the queue, as
well as listening for new items being pushed to the queue.
*/
#[derive(Debug, Clone)]
pub(crate) struct FuturesQueue<'fut> {
queue: Rc<ConcurrentQueue<LocalBoxFuture<'fut>>>,
event: Rc<Event>,
}
impl<'fut> FuturesQueue<'fut> {
pub fn new() -> Self {
let queue = Rc::new(ConcurrentQueue::unbounded());
let event = Rc::new(Event::new());
Self { queue, event }
}
pub fn push_item(&self, fut: impl Future<Output = ()> + 'fut) {
let _ = self.queue.push(fut.boxed_local());
self.event.notify(usize::MAX);
}
pub fn drain_items<'outer>(
&'outer self,
) -> impl Iterator<Item = LocalBoxFuture<'fut>> + 'outer {
self.queue.try_iter()
}
pub async fn wait_for_item(&self) {
if self.queue.is_empty() {
self.event.listen().await;
}
}
}
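
The important design detail in `wait_for_item` above is the second `is_empty` check after creating the listener: a push (and its notification) can land between the first check and `listen()`, and without the re-check that wakeup would be lost. Below is a standalone sketch of the same push/notify + double-checked wait pattern, assuming only the `concurrent-queue`, `event-listener`, and `async-io` crates; it is an illustration, not the crate-internal `ThreadQueue` itself:

```rust
use std::rc::Rc;

use concurrent_queue::ConcurrentQueue;
use event_listener::Event;

struct Queue<T> {
    queue: Rc<ConcurrentQueue<T>>,
    event: Rc<Event>,
}

impl<T> Queue<T> {
    fn new() -> Self {
        Self {
            queue: Rc::new(ConcurrentQueue::unbounded()),
            event: Rc::new(Event::new()),
        }
    }

    fn push(&self, item: T) {
        let _ = self.queue.push(item);
        // Wake every waiter; they drain the queue cooperatively.
        self.event.notify(usize::MAX);
    }

    async fn wait(&self) {
        if self.queue.is_empty() {
            let listener = self.event.listen();
            // Re-check after creating the listener: an item may have been
            // pushed (and notified) in between, and that notification would
            // otherwise be missed, leaving us waiting forever.
            if self.queue.is_empty() {
                listener.await;
            }
        }
    }
}

fn main() {
    let queue = Queue::new();
    queue.push(42u32);
    async_io::block_on(async {
        queue.wait().await; // returns immediately since the queue is non-empty
        assert_eq!(queue.queue.pop().ok(), Some(42));
    });
}
```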

View file

@ -1,64 +0,0 @@
#![allow(clippy::inline_always)]
use std::{cell::RefCell, rc::Rc};
use event_listener::Event;
// NOTE: This is the hash algorithm that mlua also uses, so we
// are not adding any additional dependencies / bloat by using it.
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{thread_id::ThreadId, util::ThreadResult};
#[derive(Clone)]
pub(crate) struct ThreadResultMap {
tracked: Rc<RefCell<FxHashSet<ThreadId>>>,
results: Rc<RefCell<FxHashMap<ThreadId, ThreadResult>>>,
events: Rc<RefCell<FxHashMap<ThreadId, Rc<Event>>>>,
}
impl ThreadResultMap {
pub fn new() -> Self {
Self {
tracked: Rc::new(RefCell::new(FxHashSet::default())),
results: Rc::new(RefCell::new(FxHashMap::default())),
events: Rc::new(RefCell::new(FxHashMap::default())),
}
}
#[inline(always)]
pub fn track(&self, id: ThreadId) {
self.tracked.borrow_mut().insert(id);
}
#[inline(always)]
pub fn is_tracked(&self, id: ThreadId) -> bool {
self.tracked.borrow().contains(&id)
}
pub fn insert(&self, id: ThreadId, result: ThreadResult) {
debug_assert!(self.is_tracked(id), "Thread must be tracked");
self.results.borrow_mut().insert(id, result);
if let Some(event) = self.events.borrow_mut().remove(&id) {
event.notify(usize::MAX);
}
}
pub async fn listen(&self, id: ThreadId) {
debug_assert!(self.is_tracked(id), "Thread must be tracked");
if !self.results.borrow().contains_key(&id) {
let listener = {
let mut events = self.events.borrow_mut();
let event = events.entry(id).or_insert_with(|| Rc::new(Event::new()));
event.listen()
};
listener.await;
}
}
pub fn remove(&self, id: ThreadId) -> Option<ThreadResult> {
let res = self.results.borrow_mut().remove(&id)?;
self.tracked.borrow_mut().remove(&id);
self.events.borrow_mut().remove(&id);
Some(res)
}
}
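
`ThreadResultMap` is crate-internal; from the outside, its track → insert/notify → remove lifecycle is driven through `track_thread`, `wait_for_thread`, and `get_thread_result` on the `LuaSchedulerExt` trait shown later in this commit. A hedged sketch of that lifecycle from inside an async function (the `runTracked` global and its chunk argument are made up for illustration):

```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::{LuaSchedulerExt, Scheduler};

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    let sched = Scheduler::new(&lua);

    // Push a new Lua thread, track it in the result map, wait for it to
    // finish, and hand its stored result back to the caller.
    lua.globals().set(
        "runTracked",
        lua.create_async_function(|lua, chunk: String| async move {
            let id = lua.push_thread_back(lua.load(chunk), ())?;
            lua.track_thread(id);
            lua.wait_for_thread(id).await;
            let result = lua
                .get_thread_result(id)
                .expect("tracked thread should have a stored result")?;
            Ok(result)
        })?,
    )?;

    let id = sched.push_thread_front(lua.load(r#"return runTracked("return 1 + 1")"#), ())?;
    block_on(sched.run());

    let res = sched.get_thread_result(id).unwrap()?;
    assert_eq!(usize::from_lua_multi(res, &lua)?, 2);
    Ok(())
}
```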

View file

@ -1,484 +0,0 @@
#![allow(clippy::module_name_repetitions)]
use std::{
cell::Cell,
process::ExitCode,
rc::{Rc, Weak as WeakRc},
sync::{Arc, Weak as WeakArc},
thread::panicking,
};
use futures_lite::prelude::*;
use mlua::prelude::*;
use async_executor::{Executor, LocalExecutor};
use tracing::{debug, instrument, trace, trace_span, Instrument};
use crate::{
error_callback::ThreadErrorCallback,
exit::Exit,
queue::{DeferredThreadQueue, FuturesQueue, SpawnedThreadQueue},
result_map::ThreadResultMap,
status::Status,
thread_id::ThreadId,
traits::IntoLuaThread,
util::{run_until_yield, ThreadResult},
};
const ERR_METADATA_ALREADY_ATTACHED: &str = "\
Lua state already has scheduler metadata attached!\
\nThis may be caused by running multiple schedulers on the same Lua state, or a call to Scheduler::run being cancelled.\
\nOnly one scheduler can be used per Lua state at once, and schedulers must always run until completion.\
";
const ERR_METADATA_REMOVED: &str = "\
Lua state scheduler metadata was unexpectedly removed!\
\nThis should never happen, and is likely a bug in the scheduler.\
";
const ERR_SET_CALLBACK_WHEN_RUNNING: &str = "\
Cannot set error callback when scheduler is running!\
";
/**
A scheduler for running Lua threads and async tasks.
*/
#[derive(Clone)]
pub struct Scheduler<'lua> {
lua: &'lua Lua,
queue_spawn: SpawnedThreadQueue,
queue_defer: DeferredThreadQueue,
error_callback: ThreadErrorCallback,
result_map: ThreadResultMap,
status: Rc<Cell<Status>>,
exit: Exit,
}
impl<'lua> Scheduler<'lua> {
/**
Creates a new scheduler for the given Lua state.
This scheduler will have a default error callback that prints errors to stderr.
# Panics
Panics if the given Lua state already has a scheduler attached to it.
*/
#[must_use]
pub fn new(lua: &'lua Lua) -> Scheduler<'lua> {
let queue_spawn = SpawnedThreadQueue::new();
let queue_defer = DeferredThreadQueue::new();
let error_callback = ThreadErrorCallback::default();
let result_map = ThreadResultMap::new();
let exit = Exit::new();
assert!(
lua.app_data_ref::<SpawnedThreadQueue>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
assert!(
lua.app_data_ref::<DeferredThreadQueue>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
assert!(
lua.app_data_ref::<ThreadErrorCallback>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
assert!(
lua.app_data_ref::<ThreadResultMap>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
assert!(
lua.app_data_ref::<Exit>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
lua.set_app_data(queue_spawn.clone());
lua.set_app_data(queue_defer.clone());
lua.set_app_data(error_callback.clone());
lua.set_app_data(result_map.clone());
lua.set_app_data(exit.clone());
let status = Rc::new(Cell::new(Status::NotStarted));
Scheduler {
lua,
queue_spawn,
queue_defer,
error_callback,
result_map,
status,
exit,
}
}
/**
Sets the current status of this scheduler and emits relevant tracing events.
*/
fn set_status(&self, status: Status) {
debug!(status = ?status, "status");
self.status.set(status);
}
/**
Returns the current status of this scheduler.
*/
#[must_use]
pub fn status(&self) -> Status {
self.status.get()
}
/**
Sets the error callback for this scheduler.
This callback will be called whenever a Lua thread errors.
Overwrites any previous error callback.
# Panics
Panics if the scheduler is currently running.
*/
pub fn set_error_callback(&self, callback: impl Fn(LuaError) + Send + 'static) {
assert!(
!self.status().is_running(),
"{ERR_SET_CALLBACK_WHEN_RUNNING}"
);
self.error_callback.replace(callback);
}
/**
Clears the error callback for this scheduler.
This will remove any current error callback, including default(s).
# Panics
Panics if the scheduler is currently running.
*/
pub fn remove_error_callback(&self) {
assert!(
!self.status().is_running(),
"{ERR_SET_CALLBACK_WHEN_RUNNING}"
);
self.error_callback.clear();
}
/**
Gets the exit code for this scheduler, if one has been set.
*/
#[must_use]
pub fn get_exit_code(&self) -> Option<ExitCode> {
self.exit.get()
}
/**
Sets the exit code for this scheduler.
This will cause [`Scheduler::run`] to exit immediately.
*/
pub fn set_exit_code(&self, code: ExitCode) {
self.exit.set(code);
}
/**
Spawns a chunk / function / thread onto the scheduler queue.
Threads are guaranteed to be resumed in the order that they were pushed to the queue.
# Returns
Returns a [`ThreadId`] that can be used to retrieve the result of the thread.
Note that the result may not be available until [`Scheduler::run`] completes.
# Errors
Errors when out of memory.
*/
pub fn push_thread_front(
&self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId> {
let id = self.queue_spawn.push_item(self.lua, thread, args)?;
self.result_map.track(id);
Ok(id)
}
/**
Defers a chunk / function / thread onto the scheduler queue.
Deferred threads are guaranteed to run after all spawned threads either yield or complete.
Threads are guaranteed to be resumed in the order that they were pushed to the queue.
# Returns
Returns a [`ThreadId`] that can be used to retrieve the result of the thread.
Note that the result may not be available until [`Scheduler::run`] completes.
# Errors
Errors when out of memory.
*/
pub fn push_thread_back(
&self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId> {
let id = self.queue_defer.push_item(self.lua, thread, args)?;
self.result_map.track(id);
Ok(id)
}
/**
Gets the tracked result for the [`LuaThread`] with the given [`ThreadId`].
Depending on the current [`Scheduler::status`], this method will return:
- [`Status::NotStarted`]: returns `None`.
- [`Status::Running`]: may return `Some(Ok(v))` or `Some(Err(e))`, but it is not guaranteed.
- [`Status::Completed`]: returns `Some(Ok(v))` or `Some(Err(e))`.
Note that this method also takes the value out of the scheduler and
stops tracking the given thread, so it may only be called once.
Any subsequent calls after this method returns `Some` will return `None`.
*/
#[must_use]
pub fn get_thread_result(&self, id: ThreadId) -> Option<LuaResult<LuaMultiValue<'lua>>> {
self.result_map.remove(id).map(|r| r.value(self.lua))
}
/**
Waits for the [`LuaThread`] with the given [`ThreadId`] to complete.
This will return instantly if the thread has already completed.
*/
pub async fn wait_for_thread(&self, id: ThreadId) {
self.result_map.listen(id).await;
}
/**
Runs the scheduler until all Lua threads have completed.
# Panics
Panics if executor metadata is already attached to the Lua state, which happens if
[`Scheduler::run`] is called again while already running, or if a previous call was cancelled.
*/
#[allow(clippy::too_many_lines)]
#[instrument(level = "debug", name = "Scheduler::run", skip(self))]
pub async fn run(&self) {
/*
Create new executors to use - note that we do not need create multiple executors
for work stealing, the user may do that themselves if they want to and it will work
just fine, as long as anything async is .await-ed from within a Lua async function.
The main purpose of the two executors here is just to have one with
the Send bound, and another (local) one without it, for Lua scheduling.
We also use the main executor to drive the main loop below forward,
saving a tiny bit of processing from going on the Lua executor itself.
*/
let local_exec = LocalExecutor::new();
let main_exec = Arc::new(Executor::new());
let fut_queue = Rc::new(FuturesQueue::new());
/*
Store the main executor and queue in Lua, so that they may be used with LuaSchedulerExt.
Also ensure we do not already have an executor or queues - these are definite user errors
and may happen if the user tries to run multiple schedulers on the same Lua state at once.
*/
assert!(
self.lua.app_data_ref::<WeakArc<Executor>>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
assert!(
self.lua.app_data_ref::<WeakRc<FuturesQueue>>().is_none(),
"{ERR_METADATA_ALREADY_ATTACHED}"
);
self.lua.set_app_data(Arc::downgrade(&main_exec));
self.lua.set_app_data(Rc::downgrade(&fut_queue.clone()));
/*
Manually tick the Lua executor, while running under the main executor.
Each tick we wait for the next action to perform, in prioritized order:
1. The exit event is triggered by setting an exit code
2. A Lua thread is available to run on the spawned queue
3. A Lua thread is available to run on the deferred queue
4. A new thread-local future is available to run on the local executor
5. Task(s) scheduled on the Lua executor have made progress and should be polled again
This ordering is vital to ensure that we don't accidentally exit the main loop
when there are new Lua threads to enqueue and potentially more work to be done.
*/
let fut = async {
let result_map = self.result_map.clone();
let process_thread = |thread: LuaThread<'lua>, args| {
// NOTE: Thread may have been cancelled from Lua
// before we got here, so we need to check it again
if thread.status() == LuaThreadStatus::Resumable {
// Check if we should be tracking this thread
let id = ThreadId::from(&thread);
let id_tracked = result_map.is_tracked(id);
let result_map_inner = if id_tracked {
Some(result_map.clone())
} else {
None
};
// Create our future which will run the thread and store its final result
let fut = async move {
if id_tracked {
// Run until yield and check if we got a final result
if let Some(res) = run_until_yield(thread.clone(), args).await {
if let Err(e) = res.as_ref() {
self.error_callback.call(e);
}
if thread.status() != LuaThreadStatus::Resumable {
let thread_res = ThreadResult::new(res, self.lua);
result_map_inner.unwrap().insert(id, thread_res);
}
}
} else {
// Just run until yield
if let Some(res) = run_until_yield(thread, args).await {
if let Err(e) = res.as_ref() {
self.error_callback.call(e);
}
}
}
};
// Spawn it on the executor
local_exec.spawn(fut).detach();
}
};
loop {
let fut_exit = self.exit.listen(); // 1
let fut_spawn = self.queue_spawn.wait_for_item(); // 2
let fut_defer = self.queue_defer.wait_for_item(); // 3
let fut_futs = fut_queue.wait_for_item(); // 4
// 5
let mut num_processed = 0;
let span_tick = trace_span!("Scheduler::tick");
let fut_tick = async {
local_exec.tick().await;
// NOTE: Try to do as much work as possible instead of just a single tick()
num_processed += 1;
while local_exec.try_tick() {
num_processed += 1;
}
};
// 1 + 2 + 3 + 4 + 5
fut_exit
.or(fut_spawn)
.or(fut_defer)
.or(fut_futs)
.or(fut_tick.instrument(span_tick.or_current()))
.await;
// Check if we should exit
if self.exit.get().is_some() {
debug!("exit signal received");
break;
}
// Process spawned threads first, then deferred threads, then futures
let mut num_spawned = 0;
let mut num_deferred = 0;
let mut num_futures = 0;
{
let _span = trace_span!("Scheduler::drain_spawned").entered();
for (thread, args) in self.queue_spawn.drain_items(self.lua) {
process_thread(thread, args);
num_spawned += 1;
}
}
{
let _span = trace_span!("Scheduler::drain_deferred").entered();
for (thread, args) in self.queue_defer.drain_items(self.lua) {
process_thread(thread, args);
num_deferred += 1;
}
}
{
let _span = trace_span!("Scheduler::drain_futures").entered();
for fut in fut_queue.drain_items() {
local_exec.spawn(fut).detach();
num_futures += 1;
}
}
// Empty executor = we didn't spawn any new Lua tasks
// above, and there are no remaining tasks to run later
let completed = local_exec.is_empty()
&& self.queue_spawn.is_empty()
&& self.queue_defer.is_empty();
trace!(
futures_spawned = num_futures,
futures_processed = num_processed,
lua_threads_spawned = num_spawned,
lua_threads_deferred = num_deferred,
"loop"
);
if completed {
break;
}
}
};
// Run the executor inside a span until all lua threads complete
self.set_status(Status::Running);
main_exec.run(fut).await;
self.set_status(Status::Completed);
// Clean up
self.lua
.remove_app_data::<WeakArc<Executor>>()
.expect(ERR_METADATA_REMOVED);
self.lua
.remove_app_data::<WeakRc<FuturesQueue>>()
.expect(ERR_METADATA_REMOVED);
}
}
impl Drop for Scheduler<'_> {
fn drop(&mut self) {
if panicking() {
// Do not cause further panics if already panicking, as
// this may abort the program instead of safely unwinding
self.lua.remove_app_data::<SpawnedThreadQueue>();
self.lua.remove_app_data::<DeferredThreadQueue>();
self.lua.remove_app_data::<ThreadErrorCallback>();
self.lua.remove_app_data::<ThreadResultMap>();
self.lua.remove_app_data::<Exit>();
} else {
// In any other case we panic if metadata was removed incorrectly
self.lua
.remove_app_data::<SpawnedThreadQueue>()
.expect(ERR_METADATA_REMOVED);
self.lua
.remove_app_data::<DeferredThreadQueue>()
.expect(ERR_METADATA_REMOVED);
self.lua
.remove_app_data::<ThreadErrorCallback>()
.expect(ERR_METADATA_REMOVED);
self.lua
.remove_app_data::<ThreadResultMap>()
.expect(ERR_METADATA_REMOVED);
self.lua
.remove_app_data::<Exit>()
.expect(ERR_METADATA_REMOVED);
}
}
}
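
Putting the pieces above together: a minimal end-to-end sketch, using only the public API shown in this commit, that spawns one thread at the front and one at the back of the queue, runs to completion, and reads the tracked results back:

```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::Scheduler;

fn main() -> LuaResult<()> {
    let lua = Lua::new();
    let sched = Scheduler::new(&lua);

    // Spawned threads run before deferred ones within each scheduler cycle.
    let first = sched.push_thread_front(lua.load(r#"return "spawned""#), ())?;
    let second = sched.push_thread_back(lua.load(r#"return "deferred""#), ())?;

    assert!(sched.status().is_not_started());
    block_on(sched.run());
    assert!(sched.status().is_completed());

    // Results are tracked for threads pushed through the scheduler itself,
    // and each result may only be taken out once.
    let first = String::from_lua_multi(sched.get_thread_result(first).unwrap()?, &lua)?;
    let second = String::from_lua_multi(sched.get_thread_result(second).unwrap()?, &lua)?;
    assert_eq!((first.as_str(), second.as_str()), ("spawned", "deferred"));
    Ok(())
}
```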

View file

@ -1,31 +0,0 @@
#![allow(clippy::module_name_repetitions)]
/**
The current status of a scheduler.
*/
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Status {
/// The scheduler has not yet started running.
NotStarted,
/// The scheduler is currently running.
Running,
/// The scheduler has completed.
Completed,
}
impl Status {
#[must_use]
pub const fn is_not_started(self) -> bool {
matches!(self, Self::NotStarted)
}
#[must_use]
pub const fn is_running(self) -> bool {
matches!(self, Self::Running)
}
#[must_use]
pub const fn is_completed(self) -> bool {
matches!(self, Self::Completed)
}
}

View file

@ -1,30 +0,0 @@
use std::hash::{Hash, Hasher};
use mlua::prelude::*;
/**
Opaque and unique ID representing a [`LuaThread`].
Typically used for associating metadata with a thread in a structure such as a `HashMap<ThreadId, ...>`.
Note that holding a `ThreadId` does not prevent the thread from being garbage collected.
The actual thread may or may not still exist and be active at any given point in time.
*/
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ThreadId {
inner: usize,
}
impl From<&LuaThread<'_>> for ThreadId {
fn from(thread: &LuaThread) -> Self {
Self {
inner: thread.to_pointer() as usize,
}
}
}
impl Hash for ThreadId {
fn hash<H: Hasher>(&self, state: &mut H) {
self.inner.hash(state);
}
}
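
`ThreadId` is `Copy`, `Eq`, and `Hash`, so it works directly as a map key. A small hedged sketch of associating user metadata with threads, as the doc comment above suggests (the `labels` map and "worker" label are made up for illustration):

```rust
use std::collections::HashMap;

use mlua::prelude::*;
use mlua_luau_scheduler::ThreadId;

fn main() -> LuaResult<()> {
    let lua = Lua::new();

    // Associate a human-readable label with each thread by its opaque id.
    let mut labels: HashMap<ThreadId, &str> = HashMap::new();

    let worker = lua.create_thread(lua.create_function(|_, ()| Ok(()))?)?;
    labels.insert(ThreadId::from(&worker), "worker");

    // The same LuaThread always maps to the same ThreadId while it is alive.
    assert_eq!(labels.get(&ThreadId::from(&worker)), Some(&"worker"));
    Ok(())
}
```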

View file

@ -1,378 +0,0 @@
#![allow(unused_imports)]
#![allow(clippy::missing_errors_doc)]
use std::{
cell::Cell, future::Future, process::ExitCode, rc::Weak as WeakRc, sync::Weak as WeakArc,
};
use async_executor::{Executor, Task};
use mlua::prelude::*;
use tracing::trace;
use crate::{
exit::Exit,
queue::{DeferredThreadQueue, FuturesQueue, SpawnedThreadQueue},
result_map::ThreadResultMap,
scheduler::Scheduler,
thread_id::ThreadId,
};
/**
Trait for any struct that can be turned into an [`LuaThread`]
and passed to the scheduler, implemented for the following types:
- Lua threads ([`LuaThread`])
- Lua functions ([`LuaFunction`])
- Lua chunks ([`LuaChunk`])
*/
pub trait IntoLuaThread<'lua> {
/**
Converts the value into a Lua thread.
# Errors
Errors when out of memory.
*/
fn into_lua_thread(self, lua: &'lua Lua) -> LuaResult<LuaThread<'lua>>;
}
impl<'lua> IntoLuaThread<'lua> for LuaThread<'lua> {
fn into_lua_thread(self, _: &'lua Lua) -> LuaResult<LuaThread<'lua>> {
Ok(self)
}
}
impl<'lua> IntoLuaThread<'lua> for LuaFunction<'lua> {
fn into_lua_thread(self, lua: &'lua Lua) -> LuaResult<LuaThread<'lua>> {
lua.create_thread(self)
}
}
impl<'lua> IntoLuaThread<'lua> for LuaChunk<'lua, '_> {
fn into_lua_thread(self, lua: &'lua Lua) -> LuaResult<LuaThread<'lua>> {
lua.create_thread(self.into_function()?)
}
}
impl<'lua, T> IntoLuaThread<'lua> for &T
where
T: IntoLuaThread<'lua> + Clone,
{
fn into_lua_thread(self, lua: &'lua Lua) -> LuaResult<LuaThread<'lua>> {
self.clone().into_lua_thread(lua)
}
}
/**
Trait for interacting with the current [`Scheduler`].
Provides extra methods on the [`Lua`] struct for:
- Setting the exit code and forcibly stopping the scheduler
- Pushing (spawning) and deferring (pushing to the back) lua threads
- Tracking and getting the result of lua threads
*/
pub trait LuaSchedulerExt<'lua> {
/**
Sets the exit code of the current scheduler.
See [`Scheduler::set_exit_code`] for more information.
# Panics
Panics if called outside of a running [`Scheduler`].
*/
fn set_exit_code(&self, code: ExitCode);
/**
Pushes (spawns) a lua thread to the **front** of the current scheduler.
See [`Scheduler::push_thread_front`] for more information.
# Panics
Panics if called outside of a running [`Scheduler`].
*/
fn push_thread_front(
&'lua self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId>;
/**
Pushes (defers) a lua thread to the **back** of the current scheduler.
See [`Scheduler::push_thread_back`] for more information.
# Panics
Panics if called outside of a running [`Scheduler`].
*/
fn push_thread_back(
&'lua self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId>;
/**
Registers the given thread to be tracked within the current scheduler.
Must be called before waiting for a thread to complete or getting its result.
*/
fn track_thread(&'lua self, id: ThreadId);
/**
Gets the result of the given thread.
See [`Scheduler::get_thread_result`] for more information.
# Panics
Panics if called outside of a running [`Scheduler`].
*/
fn get_thread_result(&'lua self, id: ThreadId) -> Option<LuaResult<LuaMultiValue<'lua>>>;
/**
Waits for the given thread to complete.
See [`Scheduler::wait_for_thread`] for more information.
# Panics
Panics if called outside of a running [`Scheduler`].
*/
fn wait_for_thread(&'lua self, id: ThreadId) -> impl Future<Output = ()>;
}
/**
Trait for interacting with the [`Executor`] for the current [`Scheduler`].
Provides extra methods on the [`Lua`] struct for:
- Spawning thread-local (`!Send`) futures on the current executor
- Spawning background (`Send`) futures on the current executor
- Spawning blocking tasks on a separate thread pool
*/
pub trait LuaSpawnExt<'lua> {
/**
Spawns the given future on the current executor and returns its [`Task`].
# Panics
Panics if called outside of a running [`Scheduler`].
# Example usage
```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::*;
fn main() -> LuaResult<()> {
let lua = Lua::new();
lua.globals().set(
"spawnBackgroundTask",
lua.create_async_function(|lua, ()| async move {
lua.spawn(async move {
println!("Hello from background task!");
}).await;
Ok(())
})?
)?;
let sched = Scheduler::new(&lua);
sched.push_thread_front(lua.load("spawnBackgroundTask()"), ());
block_on(sched.run());
Ok(())
}
```
*/
fn spawn<F, T>(&self, fut: F) -> Task<T>
where
F: Future<Output = T> + Send + 'static,
T: Send + 'static;
/**
Spawns the given thread-local future on the current executor.
Note that this future will run detached and always to completion,
preventing the [`Scheduler`] it was spawned on from completing until it is done.
# Panics
Panics if called outside of a running [`Scheduler`].
# Example usage
```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::*;
fn main() -> LuaResult<()> {
let lua = Lua::new();
lua.globals().set(
"spawnLocalTask",
lua.create_async_function(|lua, ()| async move {
lua.spawn_local(async move {
println!("Hello from local task!");
});
Ok(())
})?
)?;
let sched = Scheduler::new(&lua);
sched.push_thread_front(lua.load("spawnLocalTask()"), ());
block_on(sched.run());
Ok(())
}
```
*/
fn spawn_local<F>(&self, fut: F)
where
F: Future<Output = ()> + 'static;
/**
Spawns the given blocking function and returns its [`Task`].
This function will run on a separate thread pool and not block the current executor.
# Panics
Panics if called outside of a running [`Scheduler`].
# Example usage
```rust
use async_io::block_on;
use mlua::prelude::*;
use mlua_luau_scheduler::*;
fn main() -> LuaResult<()> {
let lua = Lua::new();
lua.globals().set(
"spawnBlockingTask",
lua.create_async_function(|lua, ()| async move {
lua.spawn_blocking(|| {
println!("Hello from blocking task!");
}).await;
Ok(())
})?
)?;
let sched = Scheduler::new(&lua);
sched.push_thread_front(lua.load("spawnBlockingTask()"), ());
block_on(sched.run());
Ok(())
}
```
*/
fn spawn_blocking<F, T>(&self, f: F) -> Task<T>
where
F: FnOnce() -> T + Send + 'static,
T: Send + 'static;
}
impl<'lua> LuaSchedulerExt<'lua> for Lua {
fn set_exit_code(&self, code: ExitCode) {
let exit = self
.app_data_ref::<Exit>()
.expect("exit code can only be set from within an active scheduler");
exit.set(code);
}
fn push_thread_front(
&'lua self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId> {
let queue = self
.app_data_ref::<SpawnedThreadQueue>()
.expect("lua threads can only be pushed from within an active scheduler");
queue.push_item(self, thread, args)
}
fn push_thread_back(
&'lua self,
thread: impl IntoLuaThread<'lua>,
args: impl IntoLuaMulti<'lua>,
) -> LuaResult<ThreadId> {
let queue = self
.app_data_ref::<DeferredThreadQueue>()
.expect("lua threads can only be pushed from within an active scheduler");
queue.push_item(self, thread, args)
}
fn track_thread(&'lua self, id: ThreadId) {
let map = self
.app_data_ref::<ThreadResultMap>()
.expect("lua threads can only be tracked from within an active scheduler");
map.track(id);
}
fn get_thread_result(&'lua self, id: ThreadId) -> Option<LuaResult<LuaMultiValue<'lua>>> {
let map = self
.app_data_ref::<ThreadResultMap>()
.expect("lua thread results can only be retrieved from within an active scheduler");
map.remove(id).map(|r| r.value(self))
}
fn wait_for_thread(&'lua self, id: ThreadId) -> impl Future<Output = ()> {
let map = self
.app_data_ref::<ThreadResultMap>()
.expect("lua thread results can only be retrieved from within an active scheduler");
async move { map.listen(id).await }
}
}
impl<'lua> LuaSpawnExt<'lua> for Lua {
fn spawn<F, T>(&self, fut: F) -> Task<T>
where
F: Future<Output = T> + Send + 'static,
T: Send + 'static,
{
let exec = self
.app_data_ref::<WeakArc<Executor>>()
.expect("tasks can only be spawned within an active scheduler")
.upgrade()
.expect("executor was dropped");
trace!("spawning future on executor");
exec.spawn(fut)
}
fn spawn_local<F>(&self, fut: F)
where
F: Future<Output = ()> + 'static,
{
let queue = self
.app_data_ref::<WeakRc<FuturesQueue>>()
.expect("tasks can only be spawned within an active scheduler")
.upgrade()
.expect("executor was dropped");
trace!("spawning local task on executor");
queue.push_item(fut);
}
fn spawn_blocking<F, T>(&self, f: F) -> Task<T>
where
F: FnOnce() -> T + Send + 'static,
T: Send + 'static,
{
let exec = self
.app_data_ref::<WeakArc<Executor>>()
.expect("tasks can only be spawned within an active scheduler")
.upgrade()
.expect("executor was dropped");
trace!("spawning blocking task on executor");
exec.spawn(blocking::unblock(f))
}
}

View file

@ -1,148 +0,0 @@
use futures_lite::StreamExt;
use mlua::prelude::*;
use tracing::instrument;
/**
Runs a Lua thread until it manually yields (using coroutine.yield), errors, or completes.
May return `None` if the thread was cancelled.
Otherwise returns the values yielded by the thread, or the error that caused it to stop.
*/
#[instrument(level = "trace", name = "Scheduler::run_until_yield", skip_all)]
pub(crate) async fn run_until_yield<'lua>(
thread: LuaThread<'lua>,
args: LuaMultiValue<'lua>,
) -> Option<LuaResult<LuaMultiValue<'lua>>> {
let mut stream = thread.into_async(args);
/*
NOTE: It is very important that we drop the thread/stream as
soon as we are done with it; it takes up valuable Lua registry space,
and detached tasks will not drop until the executor does
https://github.com/smol-rs/smol/issues/294
We also do not unwrap here since returning `None` is expected behavior for cancellation.
Even though we are converting into a stream, and then immediately running it,
the future may still be cancelled before it is polled, which gives us None.
*/
stream.next().await
}
/**
Checks if the given [`LuaValue`] is the async `POLL_PENDING` constant.
*/
#[inline]
pub(crate) fn is_poll_pending(value: &LuaValue) -> bool {
value
.as_light_userdata()
.map(|l| l == Lua::poll_pending())
.unwrap_or_default()
}
/**
Representation of a [`LuaResult`] with an associated [`LuaMultiValue`] currently stored in the Lua registry.
*/
#[derive(Debug)]
pub(crate) struct ThreadResult {
inner: LuaResult<LuaRegistryKey>,
}
impl ThreadResult {
pub fn new(result: LuaResult<LuaMultiValue>, lua: &Lua) -> Self {
Self {
inner: match result {
Ok(v) => Ok({
let vec = v.into_vec();
lua.create_registry_value(vec).expect("out of memory")
}),
Err(e) => Err(e),
},
}
}
pub fn value(self, lua: &Lua) -> LuaResult<LuaMultiValue> {
match self.inner {
Ok(key) => {
let vec = lua.registry_value(&key).unwrap();
lua.remove_registry_value(key).unwrap();
Ok(LuaMultiValue::from_vec(vec))
}
Err(e) => Err(e.clone()),
}
}
}
/**
Representation of a [`LuaThread`] with its associated arguments currently stored in the Lua registry.
*/
#[derive(Debug)]
pub(crate) struct ThreadWithArgs {
key_thread: LuaRegistryKey,
key_args: LuaRegistryKey,
}
impl ThreadWithArgs {
pub fn new<'lua>(
lua: &'lua Lua,
thread: LuaThread<'lua>,
args: LuaMultiValue<'lua>,
) -> LuaResult<Self> {
let argsv = args.into_vec();
let key_thread = lua.create_registry_value(thread)?;
let key_args = lua.create_registry_value(argsv)?;
Ok(Self {
key_thread,
key_args,
})
}
pub fn into_inner(self, lua: &Lua) -> (LuaThread<'_>, LuaMultiValue<'_>) {
let thread = lua.registry_value(&self.key_thread).unwrap();
let argsv = lua.registry_value(&self.key_args).unwrap();
let args = LuaMultiValue::from_vec(argsv);
lua.remove_registry_value(self.key_thread).unwrap();
lua.remove_registry_value(self.key_args).unwrap();
(thread, args)
}
}
/**
Wrapper struct to accept either a Lua thread or a Lua function as function argument.
[`LuaThreadOrFunction::into_thread`] may be used to convert the value into a Lua thread.
*/
#[derive(Clone)]
pub(crate) enum LuaThreadOrFunction<'lua> {
Thread(LuaThread<'lua>),
Function(LuaFunction<'lua>),
}
impl<'lua> LuaThreadOrFunction<'lua> {
pub(super) fn into_thread(self, lua: &'lua Lua) -> LuaResult<LuaThread<'lua>> {
match self {
Self::Thread(t) => Ok(t),
Self::Function(f) => lua.create_thread(f),
}
}
}
impl<'lua> FromLua<'lua> for LuaThreadOrFunction<'lua> {
fn from_lua(value: LuaValue<'lua>, _: &'lua Lua) -> LuaResult<Self> {
match value {
LuaValue::Thread(t) => Ok(Self::Thread(t)),
LuaValue::Function(f) => Ok(Self::Function(f)),
value => Err(LuaError::FromLuaConversionError {
from: value.type_name(),
to: "LuaThreadOrFunction",
message: Some("Expected thread or function".to_string()),
}),
}
}
}

View file

@ -1,9 +0,0 @@
column_width = 100
line_endings = "Unix"
indent_type = "Tabs"
indent_width = 4
quote_style = "AutoPreferDouble"
call_parentheses = "Always"
[sort_requires]
enabled = true