Remove old docs, move typedefs dir

Filip Tibell 2023-07-22 14:44:28 +02:00
parent 4cea07675c
commit 57f730e788
No known key found for this signature in database
36 changed files with 31 additions and 2456 deletions


@@ -1,5 +0,0 @@
root: ./docs
structure:
readme: ./README.md
summary: ./SUMMARY.md

1
.gitignore vendored

@@ -7,7 +7,6 @@
/bin
/target
/gitbook
# Autogenerated files


@@ -9,22 +9,3 @@ run-file FILE_NAME:
# Run tests for the Lune library
test:
cargo test --lib -- --test-threads 1
# Generate gitbook directory
generate-gitbook:
rm -rf ./gitbook
mkdir gitbook
mkdir gitbook/docs
cp -R docs gitbook
cp README.md gitbook/docs/README.md
cp .gitbook.yaml gitbook/.gitbook.yaml
rm -rf gitbook/docs/typedefs
cargo run -- --generate-gitbook-dir
# Publish gitbook directory to gitbook branch
publish-gitbook: generate-gitbook
npx push-dir --dir=gitbook --branch=gitbook


@@ -4,7 +4,7 @@
"luau-lsp.types.roblox": false,
"luau-lsp.require.mode": "relativeToFile",
"luau-lsp.require.directoryAliases": {
"@lune/": "./docs/typedefs/"
"@lune/": "~/.lune/.typedefs/0.7.4/"
},
// Luau - ignore type defs file in docs dir and dev scripts we use
"luau-lsp.ignoreGlobs": [


@@ -267,7 +267,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- Added support for instance tags & `CollectionService` in the `roblox` built-in. <br />
Currently implemented methods are listed on the [docs site](https://lune.gitbook.io/lune/roblox/api-status).
Currently implemented methods are listed on the [docs site](https://lune-org.github.io/docs/roblox/4-api-status).
### Fixed
@@ -305,7 +305,7 @@ This release adds some new features and fixes for the `roblox` built-in.
- Added a `roblox` built-in
If you're familiar with [Remodel](https://github.com/rojo-rbx/remodel), this new built-in contains more or less the same APIs, integrated into Lune. <br />
There are just too many new APIs to list in this changelog, so head over to the [docs site](https://lune.gitbook.io/lune/roblox/intro) to learn more!
There are just too many new APIs to list in this changelog, so head over to the [docs site](https://lune-org.github.io/docs/roblox/1-introduction) to learn more!
- Added a `serde` built-in

121
Cargo.lock generated

@@ -198,12 +198,6 @@ version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d"
[[package]]
name = "beef"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1"
[[package]]
name = "bitflags"
version = "1.3.2"
@@ -287,12 +281,6 @@ version = "3.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bytecount"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c676a478f63e9fa2dd5368a42f28bba0d6c560b775f38583c8bbaa7fcd67c9c"
[[package]]
name = "byteorder"
version = "0.5.3"
@@ -416,12 +404,6 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2"
[[package]]
name = "convert_case"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
name = "cookie"
version = "0.15.2"
@@ -475,19 +457,6 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
[[package]]
name = "derive_more"
version = "0.99.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
"convert_case",
"proc-macro2",
"quote",
"rustc_version 0.4.0",
"syn 1.0.109",
]
[[package]]
name = "dialoguer"
version = "0.10.4"
@@ -672,34 +641,6 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "full_moon"
version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b9a9bf5e42aec08f4b59be1438d66b01ab0a0f51dca309626e219697b60871c"
dependencies = [
"bytecount",
"cfg-if",
"derive_more",
"full_moon_derive",
"logos",
"paste",
"serde",
"smol_str",
]
[[package]]
name = "full_moon_derive"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99b4bd12ce56927d1dc5478d21528ea8c4b93ca85ff8f8043b6a5351a2a3c6f7"
dependencies = [
"indexmap 1.9.3",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "futures-channel"
version = "0.3.28"
@@ -1106,29 +1047,6 @@ version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "logos"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf8b031682c67a8e3d5446840f9573eb7fe26efe7ec8d195c9ac4c0647c502f1"
dependencies = [
"logos-derive",
]
[[package]]
name = "logos-derive"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d849148dbaf9661a6151d1ca82b13bb4c4c128146a88d05253b38d4e2f496c"
dependencies = [
"beef",
"fnv",
"proc-macro2",
"quote",
"regex-syntax 0.6.29",
"syn 1.0.109",
]
[[package]]
name = "luau0-src"
version = "0.5.11+luau583"
@@ -1151,7 +1069,6 @@ dependencies = [
"directories",
"dunce",
"env_logger 0.10.0",
"full_moon",
"futures-util",
"glam",
"hyper",
@@ -1647,7 +1564,7 @@ dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax 0.7.4",
"regex-syntax",
]
[[package]]
@@ -1658,15 +1575,9 @@ checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.7.4",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
version = "0.7.4"
@@ -1779,16 +1690,7 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
dependencies = [
"semver 0.9.0",
]
[[package]]
name = "rustc_version"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver 1.0.18",
"semver",
]
[[package]]
@@ -1896,12 +1798,6 @@ dependencies = [
"semver-parser",
]
[[package]]
name = "semver"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918"
[[package]]
name = "semver-parser"
version = "0.7.0"
@@ -2040,15 +1936,6 @@ version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
[[package]]
name = "smol_str"
version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fad6c857cbab2627dcf01ec85a623ca4e7dcb5691cbaa3d7fb7653671f0d09c9"
dependencies = [
"serde",
]
[[package]]
name = "socket2"
version = "0.4.9"
@@ -2087,7 +1974,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d022496b16281348b52d0e30ae99e01a73d737b2f45d38fed4edf79f9325a1d5"
dependencies = [
"discard",
"rustc_version 0.2.3",
"rustc_version",
"stdweb-derive",
"stdweb-internal-macros",
"stdweb-internal-runtime",


@@ -24,7 +24,6 @@ cli = [
"dep:env_logger",
"dep:itertools",
"dep:clap",
"dep:full_moon",
"dep:include_dir",
"dep:regex",
]
@@ -111,7 +110,6 @@ env_logger = { optional = true, version = "0.10" }
itertools = { optional = true, version = "0.10" }
clap = { optional = true, version = "4.1", features = ["derive"] }
full_moon = { optional = true, version = "0.18", features = ["roblox"] }
include_dir = { optional = true, version = "0.7.3", features = ["glob"] }
regex = { optional = true, version = "1.7", default-features = false, features = [
"std",


@@ -1,24 +0,0 @@
# Summary
## Home
- [Installation](pages/home/Installation.md)
- [Writing Scripts](pages/home/Writing-Scripts.md)
- [Running Scripts](pages/home/Running-Scripts.md)
- [Editor Setup](pages/home/Editor-Setup.md)
## Roblox
- [Introduction](pages/roblox/Introduction.md)
- [Examples](pages/roblox/Examples.md)
- [API Status](pages/roblox/Api-Status.md)
## API Reference
- [fs](pages/api/fs.md)
- [net](pages/api/net.md)
- [process](pages/api/process.md)
- [roblox](pages/api/roblox.md)
- [serde](pages/api/serde.md)
- [stdio](pages/api/stdio.md)
- [task](pages/api/task.md)


@@ -1,283 +0,0 @@
--!strict
local fs = require("@lune/fs")
local net = require("@lune/net")
local serde = require("@lune/serde")
local process = require("@lune/process")
local roblox = require("@lune/roblox")
export type LuneDataModel = roblox.DataModel
export type LuneInstance = roblox.Instance
local function getAuthCookieWithFallbacks()
local cookie = roblox.getAuthCookie()
if cookie then
return cookie
end
local cookieFromEnv = process.env.REMODEL_AUTH
if cookieFromEnv and #cookieFromEnv > 0 then
return `.ROBLOSECURITY={cookieFromEnv}`
end
for index, arg in process.args do
if arg == "--auth" then
local cookieFromArgs = process.args[index + 1]
if cookieFromArgs and #cookieFromArgs > 0 then
return `.ROBLOSECURITY={cookieFromArgs}`
end
break
end
end
error([[
Failed to find ROBLOSECURITY cookie for authentication!
Make sure you have logged into studio, or set the ROBLOSECURITY environment variable.
]])
end
local function downloadAssetId(assetId: number)
-- 1. Try to find the auth cookie for the current user
local cookie = getAuthCookieWithFallbacks()
-- 2. Send a request to the asset delivery API,
-- which will respond with cdn download link(s)
local assetApiResponse = net.request({
url = `https://assetdelivery.roblox.com/v2/assetId/{assetId}`,
headers = {
Accept = "application/json",
Cookie = cookie,
},
})
if not assetApiResponse.ok then
error(
string.format(
"Failed to fetch asset download link for asset id %s!\n%s (%s)\n%s",
tostring(assetId),
tostring(assetApiResponse.statusCode),
tostring(assetApiResponse.statusMessage),
tostring(assetApiResponse.body)
)
)
end
-- 3. Make sure we got a valid response body
local assetApiBody = serde.decode("json", assetApiResponse.body)
if type(assetApiBody) ~= "table" then
error(
string.format(
"Asset delivery API returned an invalid response body!\n%s",
assetApiResponse.body
)
)
elseif type(assetApiBody.locations) ~= "table" then
error(
string.format(
"Asset delivery API returned an invalid response body!\n%s",
assetApiResponse.body
)
)
end
-- 4. Grab the first asset download location - we only
-- requested one in our query, so this will be correct
local firstLocation = assetApiBody.locations[1]
if type(firstLocation) ~= "table" then
error(
string.format(
"Asset delivery API returned no download locations!\n%s",
assetApiResponse.body
)
)
elseif type(firstLocation.location) ~= "string" then
error(
string.format(
"Asset delivery API returned no valid download locations!\n%s",
assetApiResponse.body
)
)
end
-- 5. Fetch the place contents from the cdn
local cdnResponse = net.request({
url = firstLocation.location,
headers = {
Cookie = cookie,
},
})
if not cdnResponse.ok then
error(
string.format(
"Failed to download asset with id %s from the Roblox cdn!\n%s (%s)\n%s",
tostring(assetId),
tostring(cdnResponse.statusCode),
tostring(cdnResponse.statusMessage),
tostring(cdnResponse.body)
)
)
end
-- 6. The response body should now be the contents of the asset file
return cdnResponse.body
end
local function uploadAssetId(assetId: number, contents: string)
-- 1. Try to find the auth cookie for the current user
local cookie = getAuthCookieWithFallbacks()
-- 2. Create request headers in advance, we might re-use them for CSRF challenges
local headers = {
["User-Agent"] = "Roblox/WinInet",
["Content-Type"] = "application/octet-stream",
Accept = "application/json",
Cookie = cookie,
}
-- 3. Create and send a request to the upload url
local uploadResponse = net.request({
url = `https://data.roblox.com/Data/Upload.ashx?assetid={assetId}`,
body = contents,
method = "POST",
headers = headers,
})
-- 4. Check if we got a valid response, we might have gotten a CSRF
-- challenge and need to send the request with a token included
if
not uploadResponse.ok
and uploadResponse.statusCode == 403
and uploadResponse.headers["x-csrf-token"] ~= nil
then
headers["X-CSRF-Token"] = uploadResponse.headers["x-csrf-token"]
uploadResponse = net.request({
url = `https://data.roblox.com/Data/Upload.ashx?assetid={assetId}`,
body = contents,
method = "POST",
headers = headers,
})
end
if not uploadResponse.ok then
error(
string.format(
"Failed to upload asset with id %s to Roblox!\n%s (%s)\n%s",
tostring(assetId),
tostring(uploadResponse.statusCode),
tostring(uploadResponse.statusMessage),
tostring(uploadResponse.body)
)
)
end
end
local remodel = {}
--[=[
Load an `rbxl` or `rbxlx` file from the filesystem.
Returns a `DataModel` instance, equivalent to `game` from within Roblox.
]=]
function remodel.readPlaceFile(filePath: string)
local placeFile = fs.readFile(filePath)
local place = roblox.deserializePlace(placeFile)
return place
end
--[=[
Load an `rbxm` or `rbxmx` file from the filesystem.
Note that this function returns a **list of instances** instead of a single instance!
This is because models can contain multiple top-level instances.
]=]
function remodel.readModelFile(filePath: string)
local modelFile = fs.readFile(filePath)
local model = roblox.deserializeModel(modelFile)
return model
end
--[=[
Reads a place asset from Roblox, equivalent to `remodel.readPlaceFile`.
***NOTE:** This function requires authentication using a ROBLOSECURITY cookie!*
]=]
function remodel.readPlaceAsset(assetId: number)
local contents = downloadAssetId(assetId)
local place = roblox.deserializePlace(contents)
return place
end
--[=[
Reads a model asset from Roblox, equivalent to `remodel.readModelFile`.
***NOTE:** This function requires authentication using a ROBLOSECURITY cookie!*
]=]
function remodel.readModelAsset(assetId: number)
local contents = downloadAssetId(assetId)
local place = roblox.deserializeModel(contents)
return place
end
--[=[
Saves an `rbxl` or `rbxlx` file out of the given `DataModel` instance.
If the instance is not a `DataModel`, this function will throw.
Models should be saved with `writeModelFile` instead.
]=]
function remodel.writePlaceFile(filePath: string, dataModel: LuneDataModel)
local asBinary = string.sub(filePath, -5) == ".rbxl"
local asXml = string.sub(filePath, -6) == ".rbxlx"
assert(asBinary or asXml, "File path must have .rbxl or .rbxlx extension")
local placeFile = roblox.serializePlace(dataModel, asXml)
fs.writeFile(filePath, placeFile)
end
--[=[
Saves an `rbxm` or `rbxmx` file out of the given `Instance`.
If the instance is a `DataModel`, this function will throw.
Places should be saved with `writePlaceFile` instead.
]=]
function remodel.writeModelFile(filePath: string, instance: LuneInstance)
local asBinary = string.sub(filePath, -5) == ".rbxm"
local asXml = string.sub(filePath, -6) == ".rbxmx"
assert(asBinary or asXml, "File path must have .rbxm or .rbxmx extension")
local placeFile = roblox.serializeModel({ instance }, asXml)
fs.writeFile(filePath, placeFile)
end
--[=[
Uploads the given `DataModel` instance to Roblox, overwriting an existing place.
If the instance is not a `DataModel`, this function will throw.
Models should be uploaded with `writeExistingModelAsset` instead.
***NOTE:** This function requires authentication using a ROBLOSECURITY cookie!*
]=]
function remodel.writeExistingPlaceAsset(dataModel: LuneDataModel, assetId: number)
local placeFile = roblox.serializePlace(dataModel)
uploadAssetId(assetId, placeFile)
end
--[=[
Uploads the given instance to Roblox, overwriting an existing model.
If the instance is a `DataModel`, this function will throw.
Places should be uploaded with `writeExistingPlaceAsset` instead.
***NOTE:** This function requires authentication using a ROBLOSECURITY cookie!*
]=]
function remodel.writeExistingModelAsset(instance: LuneInstance, assetId: number)
local modelFile = roblox.serializeModel({ instance })
uploadAssetId(assetId, modelFile)
end
remodel.readFile = fs.readFile
remodel.readDir = fs.readDir
remodel.writeFile = fs.writeFile
remodel.createDirAll = fs.writeDir
remodel.removeFile = fs.removeFile
remodel.removeDir = fs.removeDir
remodel.isFile = fs.isFile
remodel.isDir = fs.isDir
return remodel

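For reference, here is a minimal usage sketch of the compatibility module above. The script location and file names are hypothetical; the functions are the ones defined in the module.

```lua
-- Hypothetical script placed next to the module above, which we assume
-- is saved as "remodel.luau" inside a sibling "lib" directory
local remodel = require("../lib/remodel")

-- Read a place file from disk, inspect it, and write it back out
local game = remodel.readPlaceFile("place.rbxl")
local workspace = game:GetService("Workspace")
print("Workspace has", #workspace:GetChildren(), "children")
remodel.writePlaceFile("place.rbxl", game)
```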

@@ -1,22 +0,0 @@
# 🧑‍💻 Configuring VSCode and tooling for Lune
Lune puts developer experience first, and as such provides type definitions and configurations for several tools out of the box.
These steps assume you have already installed Lune and that it is available to run in the current directory.
## Luau LSP
1. Run `lune --setup` to generate Luau type definitions for your installed version of Lune
2. Verify that type definition files have been generated
3. Modify your VSCode settings, either by using the settings menu or in `settings.json`:
```json
"luau-lsp.require.mode": "relativeToFile", // Set the require mode to work with Lune
"luau-lsp.require.fileAliases": { // Add type definitions for Lune builtins
"@lune/fs": ".../.lune/.typedefs/x.y.z/fs.luau",
"@lune/net": ".../.lune/.typedefs/x.y.z/net.luau",
"@lune/...": "..."
}
```
_**NOTE:** If you already had a `.vscode/settings.json` file in your current directory the type definition files may have been added automatically!_


@@ -1,36 +0,0 @@
<!-- markdownlint-disable MD033 -->
# ⚙️ Installation
The preferred way of installing Lune is using [Aftman](https://github.com/lpghatguy/aftman).
Running this command in your terminal will add `lune` to an `aftman.toml` file in the current directory, or create one if it does not exist:
```sh
aftman add filiptibell/lune
```
## Other options
### Building from source
Building and installing from source requires the latest version of [Rust & Cargo](https://doc.rust-lang.org/cargo/getting-started/installation.html) to be installed on your system. <br />
Once installed, run the following command in your terminal:
```sh
cargo install lune --locked
```
Note that Lune does not make any minimum supported Rust version (MSRV) guarantees, and you may need to upgrade your version of Rust to update Lune in the future.
### Using GitHub Releases
You can download pre-built binaries for most systems directly from the [GitHub Releases](https://github.com/filiptibell/lune/releases) page. <br />
There are many tools that can install binaries directly from releases, and it is up to you to choose which tool to use when installing this way.
## Next steps
Congratulations! You've installed Lune and are now ready to write your first script.
- If you want to write standalone scripts, head over to the [Writing Scripts](https://lune.gitbook.io/lune/home/writing-scripts) page.
- If you want to write Lune scripts specifically for Roblox, check out the [Roblox](https://lune.gitbook.io/lune/roblox/intro) section.


@@ -1,51 +0,0 @@
<!-- markdownlint-disable MD033 -->
# 🏃 Running Lune Scripts
After you've written a script file, for example `script-name.luau`, you can run it:
```sh
lune script-name
```
This will look for the file `script-name.luau`**_<sup>[1]</sup>_** in a few locations:
- The current directory
- The folder `lune` in the current directory, if it exists
- The folder `.lune` in the current directory, if it exists
- The folder `lune` in the _home_ directory, if it exists
- The folder `.lune` in the _home_ directory, if it exists
## 🎛️ Passing Command-Line Arguments
Arguments can be passed to a Lune script directly from the command line when running it:
```sh
lune script-name arg1 arg2 "argument three"
```
These arguments will then be available in your script using `process.args`:
```lua
print(process.args)
--> { "arg1", "arg2", "argument three" }
```
## 💭 Additional Commands
```sh
lune --list
```
Lists all scripts found in `lune` or `.lune` directories, including any top-level description comments. <br />
Lune description comments are always written at the top of a file and start with a lua-style comment arrow (`-->`).
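For example, a script saved as `.lune/greet.luau` (the name here is just illustrative) with a description comment at the top would show up in this list together with its description:

```lua
--> Prints a friendly greeting - this line is shown by `lune --list`

print("Hello, Lune! 🌙")
```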
```sh
lune -
```
Runs a script passed to Lune using stdin. Occasionally useful for running scripts piped to Lune from external sources.
---
**_<sup>[1]</sup>_** _Lune also supports files with the `.lua` extension but using the `.luau` extension is highly recommended. Additionally, if you don't want Lune to look in sub-directories or try to find files with `.lua` / `.luau` extensions at all, you can provide an absolute file path. This will disable all file path parsing and checks, and just run the file directly._


@@ -1,326 +0,0 @@
<!-- markdownlint-disable MD033 -->
<!-- markdownlint-disable MD026 -->
# ✏️ Writing Lune Scripts
If you've written Lua (or Luau) scripts before, this walkthrough will make you feel right at home.
Once you have a script you want to run, head over to the [Running Scripts](https://lune.gitbook.io/lune/home/running-scripts) page.
## Hello, Lune!
```lua
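-- NOTE: Depending on the Lune version you are running, the built-in libraries
-- used throughout these examples (fs, net, process, stdio, task) may need to
-- be required explicitly, for example: local process = require("@lune/process")
-- The examples below assume they are available in scope.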
--[[
EXAMPLE #1
Using arguments given to the program
]]
if #process.args > 0 then
print("Got arguments:")
print(process.args)
if #process.args > 3 then
error("Too many arguments!")
end
else
print("Got no arguments ☹️")
end
--[[
EXAMPLE #2
Using the stdio library to prompt for terminal input
]]
local text = stdio.prompt("text", "Please write some text")
print("You wrote '" .. text .. "'!")
local confirmed = stdio.prompt("confirm", "Please confirm that you wrote some text")
if confirmed == false then
error("You didn't confirm!")
else
print("Confirmed!")
end
--[[
EXAMPLE #3
Get & set environment variables
Checks if environment variables are empty or not,
prints out ❌ if empty and ✅ if they have a value
]]
print("Reading current environment 🔎")
-- Environment variables can be read directly
assert(process.env.PATH ~= nil, "Missing PATH")
assert(process.env.PWD ~= nil, "Missing PWD")
-- And they can also be accessed using Luau's generalized iteration (but not pairs())
for key, value in process.env do
local box = if value and value ~= "" then "✅" else "❌"
print(string.format("[%s] %s", box, key))
end
--[[
EXAMPLE #4
Writing a module
Modularizing and splitting up your code in Lune is very straightforward,
in contrast to other scripting languages and shells such as bash
]]
local module = {}
function module.sayHello()
print("Hello, Lune! 🌙")
end
return module
--[[
EXAMPLE #5
Using a function from another module / script
Lune has path-relative imports, similar to other popular languages such as JavaScript
]]
local module = require("../modules/module")
module.sayHello()
--[[
EXAMPLE #6
Spawning concurrent tasks
These tasks will run at the same time as other Lua code which lets you do primitive multitasking
]]
task.spawn(function()
print("Spawned a task that will run instantly but not block")
task.wait(5)
end)
print("Spawning a delayed task that will run in 5 seconds")
task.delay(5, function()
print("...")
task.wait(1)
print("Hello again!")
task.wait(1)
print("Goodbye again! 🌙")
end)
--[[
EXAMPLE #7
Read files in the current directory
This prints out directory & file names with some fancy icons
]]
print("Reading current dir 🗂️")
local entries = fs.readDir(".")
-- NOTE: We have to do this outside of the sort function
-- to avoid yielding across the metamethod boundary, since all
-- of the filesystem APIs are asynchronous and may yield
local entryIsDir = {}
for _, entry in entries do
entryIsDir[entry] = fs.isDir(entry)
end
-- Sort prioritizing directories first, then alphabetically
table.sort(entries, function(entry0, entry1)
if entryIsDir[entry0] ~= entryIsDir[entry1] then
return entryIsDir[entry0]
end
return entry0 < entry1
end)
-- Make sure we got some known files that should always exist
assert(table.find(entries, "Cargo.toml") ~= nil, "Missing Cargo.toml")
assert(table.find(entries, "Cargo.lock") ~= nil, "Missing Cargo.lock")
-- Print the pretty stuff
for _, entry in entries do
if fs.isDir(entry) then
print("📁 " .. entry)
else
print("📄 " .. entry)
end
end
--[[
EXAMPLE #8
Call out to another program / executable
You can also get creative and combine this with example #6 to spawn several programs at the same time!
]]
print("Sending 4 pings to google 🌏")
local result = process.spawn("ping", {
"google.com",
"-c 4",
})
--[[
EXAMPLE #9
Using the result of a spawned process, exiting the process
This looks scary with lots of weird symbols, but it's just some Lua-style pattern matching
to parse the lines of "min/avg/max/stddev = W/X/Y/Z ms" that the ping program outputs to us
]]
if result.ok then
assert(#result.stdout > 0, "Result output was empty")
local min, avg, max, stddev = string.match(
result.stdout,
"min/avg/max/stddev = ([%d%.]+)/([%d%.]+)/([%d%.]+)/([%d%.]+) ms"
)
print(string.format("Minimum ping time: %.3fms", assert(tonumber(min))))
print(string.format("Maximum ping time: %.3fms", assert(tonumber(max))))
print(string.format("Average ping time: %.3fms", assert(tonumber(avg))))
print(string.format("Standard deviation: %.3fms", assert(tonumber(stddev))))
else
print("Failed to send ping to google!")
print(result.stderr)
process.exit(result.code)
end
--[[
EXAMPLE #10
Using the built-in networking library, encoding & decoding json
]]
print("Sending PATCH request to web API 📤")
local apiResult = net.request({
url = "https://jsonplaceholder.typicode.com/posts/1",
method = "PATCH",
headers = {
["Content-Type"] = "application/json",
},
body = net.jsonEncode({
title = "foo",
body = "bar",
}),
})
if not apiResult.ok then
print("Failed to send network request!")
print(string.format("%d (%s)", apiResult.statusCode, apiResult.statusMessage))
print(apiResult.body)
process.exit(1)
end
type ApiResponse = {
id: number,
title: string,
body: string,
userId: number,
}
local apiResponse: ApiResponse = net.jsonDecode(apiResult.body)
assert(apiResponse.title == "foo", "Invalid json response")
assert(apiResponse.body == "bar", "Invalid json response")
print("Got valid JSON response with changes applied")
--[[
EXAMPLE #11
Using the stdio library to print pretty
]]
print("Printing with pretty colors and auto-formatting 🎨")
print(stdio.color("blue") .. string.rep("—", 22) .. stdio.color("reset"))
print("API response:", apiResponse)
warn({
Oh = {
No = {
TooMuch = {
Nesting = {
"Will not print",
},
},
},
},
})
print(stdio.color("blue") .. string.rep("—", 22) .. stdio.color("reset"))
--[[
EXAMPLE #12
Saying goodbye 😔
]]
print("Goodbye, lune! 🌙")
```
More real-world examples of how to write Lune scripts can be found in the [examples](https://github.com/filiptibell/lune/blob/main/.lune/examples/) folder.
Documentation for individual APIs and types can be found in "API Reference" in the sidebar of this wiki.
## Extras
### 🔀 Example translation from Bash
```bash
#!/bin/bash
VALID=true
COUNT=1
while [ $VALID ]
do
echo $COUNT
if [ $COUNT -eq 5 ];
then
break
fi
((COUNT++))
done
```
**_With Lune & Luau:_**
```lua
local valid = true
local count = 1
while valid do
print(count)
if count == 5 then
break
end
count += 1
end
```


@@ -1,76 +0,0 @@
<!-- markdownlint-disable MD041 -->
<!-- markdownlint-disable MD033 -->
# API Status
This page indicates the current implementation status for instance methods and datatypes in the `roblox` library.
If an API on a class is not listed here, it may be out of scope for Lune and may never be implemented. <br />
However, if a recently added datatype that can be used as an instance property is missing, it is likely that it will be implemented.
## Classes
### `Instance`
Currently implemented APIs:
- [`new`](https://create.roblox.com/docs/reference/engine/datatypes/Instance#new) - note that this does not include the second `parent` argument
- [`AddTag`](https://create.roblox.com/docs/reference/engine/classes/CollectionService#AddTag)
- [`Clone`](https://create.roblox.com/docs/reference/engine/classes/Instance#Clone)
- [`Destroy`](https://create.roblox.com/docs/reference/engine/classes/Instance#Destroy)
- [`ClearAllChildren`](https://create.roblox.com/docs/reference/engine/classes/Instance#ClearAllChildren)
- [`FindFirstAncestor`](https://create.roblox.com/docs/reference/engine/classes/Instance#FindFirstAncestor)
- [`FindFirstAncestorOfClass`](https://create.roblox.com/docs/reference/engine/classes/Instance#FindFirstAncestorOfClass)
- [`FindFirstAncestorWhichIsA`](https://create.roblox.com/docs/reference/engine/classes/Instance#FindFirstAncestorWhichIsA)
- [`FindFirstChild`](https://create.roblox.com/docs/reference/engine/classes/Instance#FindFirstChild)
- [`FindFirstChildOfClass`](https://create.roblox.com/docs/reference/engine/classes/Instance#FindFirstChildOfClass)
- [`FindFirstChildWhichIsA`](https://create.roblox.com/docs/reference/engine/classes/Instance#FindFirstChildWhichIsA)
- [`GetAttribute`](https://create.roblox.com/docs/reference/engine/classes/Instance#GetAttribute)
- [`GetAttributes`](https://create.roblox.com/docs/reference/engine/classes/Instance#GetAttributes)
- [`GetChildren`](https://create.roblox.com/docs/reference/engine/classes/Instance#GetChildren)
- [`GetDescendants`](https://create.roblox.com/docs/reference/engine/classes/Instance#GetDescendants)
- [`GetFullName`](https://create.roblox.com/docs/reference/engine/classes/Instance#GetFullName)
- [`GetTags`](https://create.roblox.com/docs/reference/engine/classes/CollectionService#GetTags)
- [`HasTag`](https://create.roblox.com/docs/reference/engine/classes/CollectionService#HasTag)
- [`IsA`](https://create.roblox.com/docs/reference/engine/classes/Instance#IsA)
- [`IsAncestorOf`](https://create.roblox.com/docs/reference/engine/classes/Instance#IsAncestorOf)
- [`IsDescendantOf`](https://create.roblox.com/docs/reference/engine/classes/Instance#IsDescendantOf)
- [`RemoveTag`](https://create.roblox.com/docs/reference/engine/classes/CollectionService#RemoveTag)
- [`SetAttribute`](https://create.roblox.com/docs/reference/engine/classes/Instance#SetAttribute)
### `DataModel`
Currently implemented APIs:
- [`GetService`](https://create.roblox.com/docs/reference/engine/classes/ServiceProvider#GetService)
- [`FindService`](https://create.roblox.com/docs/reference/engine/classes/ServiceProvider#FindService)
## Datatypes
Currently implemented datatypes:
- [`Axes`](https://create.roblox.com/docs/reference/engine/datatypes/Axes)
- [`BrickColor`](https://create.roblox.com/docs/reference/engine/datatypes/BrickColor)
- [`CFrame`](https://create.roblox.com/docs/reference/engine/datatypes/CFrame)
- [`Color3`](https://create.roblox.com/docs/reference/engine/datatypes/Color3)
- [`ColorSequence`](https://create.roblox.com/docs/reference/engine/datatypes/ColorSequence)
- [`ColorSequenceKeypoint`](https://create.roblox.com/docs/reference/engine/datatypes/ColorSequenceKeypoint)
- [`Enum`](https://create.roblox.com/docs/reference/engine/datatypes/Enum)
- [`Faces`](https://create.roblox.com/docs/reference/engine/datatypes/Faces)
- [`Font`](https://create.roblox.com/docs/reference/engine/datatypes/Font)
- [`NumberRange`](https://create.roblox.com/docs/reference/engine/datatypes/NumberRange)
- [`NumberSequence`](https://create.roblox.com/docs/reference/engine/datatypes/NumberSequence)
- [`NumberSequenceKeypoint`](https://create.roblox.com/docs/reference/engine/datatypes/NumberSequenceKeypoint)
- [`PhysicalProperties`](https://create.roblox.com/docs/reference/engine/datatypes/PhysicalProperties)
- [`Ray`](https://create.roblox.com/docs/reference/engine/datatypes/Ray)
- [`Rect`](https://create.roblox.com/docs/reference/engine/datatypes/Rect)
- [`Region3`](https://create.roblox.com/docs/reference/engine/datatypes/Region3)
- [`Region3int16`](https://create.roblox.com/docs/reference/engine/datatypes/Region3int16)
- [`UDim`](https://create.roblox.com/docs/reference/engine/datatypes/UDim)
- [`UDim2`](https://create.roblox.com/docs/reference/engine/datatypes/UDim2)
- [`Vector2`](https://create.roblox.com/docs/reference/engine/datatypes/Vector2)
- [`Vector2int16`](https://create.roblox.com/docs/reference/engine/datatypes/Vector2int16)
- [`Vector3`](https://create.roblox.com/docs/reference/engine/datatypes/Vector3)
- [`Vector3int16`](https://create.roblox.com/docs/reference/engine/datatypes/Vector3int16)
Note that these datatypes are kept as up-to-date as possible, but recently added members & methods may be missing.
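As a rough sketch of how a few of the `Instance` APIs listed above fit together (the class name, tag, and attribute used here are made up for illustration):

```lua
local roblox = require("@lune/roblox")
local Instance = roblox.Instance

-- Create an instance, tag it, and attach an attribute using the implemented APIs
local folder = Instance.new("Folder")
folder.Name = "TaggedFolder"
folder:AddTag("MyTag")
folder:SetAttribute("Version", 1)

print(folder:HasTag("MyTag")) --> true
print(folder:GetAttribute("Version")) --> 1
print(folder:GetTags()) --> { "MyTag" }
```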


@@ -1,78 +0,0 @@
<!-- markdownlint-disable MD033 -->
<!-- markdownlint-disable MD026 -->
# Example Lune Scripts for Roblox
These are a few examples of things you can do using the built-in `roblox` library.
## `1` - Make all parts anchored in a place file
```lua
local roblox = require("@lune/roblox")
-- Read the place file called myPlaceFile.rbxl into a DataModel called "game"
-- This works exactly the same as in Roblox, except "game" does not exist by default - you have to load it from a file!
local game = roblox.readPlaceFile("myPlaceFile.rbxl")
local workspace = game:GetService("Workspace")
-- Make all of the parts in the workspace anchored
for _, descendant in workspace:GetDescendants() do
if descendant:IsA("BasePart") then
descendant.Anchored = true
end
end
-- Save the DataModel (game) back to the file that we read it from
roblox.writePlaceFile("myPlaceFile.rbxl")
```
---
## `2` - Save instances in a place as individual model files
```lua
local roblox = require("@lune/roblox")
local fs = require("@lune/fs")
-- Here we load a file just like in the first example
local game = roblox.readPlaceFile("myPlaceFile.rbxl")
local workspace = game:GetService("Workspace")
-- We use a normal Lune API to make sure a directory exists to save our models in
fs.writeDir("models")
-- Then we save all of our instances in Workspace as model files, in our new directory
-- Note that a model file can actually contain several instances at once, so we pass a table here
for _, child in workspace:GetChildren() do
roblox.writeModelFile("models/" .. child.Name, { child })
end
```
---
## `3` - Make a new place from scratch
```lua
local roblox = require("@lune/roblox")
local Instance = roblox.Instance
-- You can even create a new DataModel using Instance.new, which is not normally possible in Roblox
-- This is normal - most instances that are not normally accessible in Roblox can be manipulated using Lune!
local game = Instance.new("DataModel")
local workspace = game:GetService("Workspace")
-- Here we just make a bunch of models with parts in them for demonstration purposes
for i = 1, 50 do
local model = Instance.new("Model")
model.Name = "Model #" .. tostring(i)
model.Parent = workspace
for j = 1, 4 do
local part = Instance.new("Part")
part.Name = "Part #" .. tostring(j)
part.Parent = model
end
end
-- As always, we have to save the DataModel (game) to a file when we're done
roblox.writePlaceFile("myPlaceWithLotsOfModels.rbxl")
```


@@ -1,10 +0,0 @@
<!-- markdownlint-disable MD033 -->
<!-- markdownlint-disable MD026 -->
# ✏️ Writing Lune Scripts for Roblox
Lune has a powerful built-in library and set of APIs for manipulating Roblox place files and model files. It contains APIs for reading & writing files, and gives you instances to use, just as if you were scripting inside of the Roblox engine, albeit with a more limited API.
For examples on how to write Roblox-specific Lune scripts, check out the [Examples](https://lune.gitbook.io/lune/roblox/examples) page.
For a full list of the currently implemented Roblox APIs, check out the [API Status](https://lune.gitbook.io/lune/roblox/api-status) page.
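As a small taste of what this looks like in practice (the file name below is just an example), reading a place file and accessing a service works much like it does inside Roblox:

```lua
local roblox = require("@lune/roblox")

-- "game" is not a global here - it comes from the place file we load
local game = roblox.readPlaceFile("myPlaceFile.rbxl")
local workspace = game:GetService("Workspace")

print("Top-level instances in Workspace:", #workspace:GetChildren())
```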


@@ -2,14 +2,32 @@ use std::collections::HashMap;
use anyhow::{Context, Result};
use directories::UserDirs;
use futures_util::future::try_join_all;
use include_dir::Dir;
use tokio::fs::{create_dir_all, write};
#[allow(clippy::too_many_lines)]
pub async fn generate_from_type_definitions(
typedef_files: HashMap<String, Vec<u8>>,
) -> Result<String> {
pub async fn generate_typedef_files_from_definitions(dir: &Dir<'_>) -> Result<String> {
let contents = read_typedefs_dir_contents(dir);
write_typedef_files(contents).await
}
fn read_typedefs_dir_contents(dir: &Dir<'_>) -> HashMap<String, Vec<u8>> {
let mut definitions = HashMap::new();
for entry in dir.find("*.luau").unwrap() {
let entry_file = entry.as_file().unwrap();
let entry_name = entry_file.path().file_name().unwrap().to_string_lossy();
let typedef_name = entry_name.trim_end_matches(".luau");
let typedef_contents = entry_file.contents().to_vec();
definitions.insert(typedef_name.to_string(), typedef_contents);
}
definitions
}
async fn write_typedef_files(typedef_files: HashMap<String, Vec<u8>>) -> Result<String> {
let version_string = env!("CARGO_PKG_VERSION");
let mut dirs_to_write = Vec::new();
let mut files_to_write = Vec::new();


@@ -1,129 +0,0 @@
use anyhow::{bail, Result};
use super::{
item::{DefinitionsItem, DefinitionsItemFunctionArg, DefinitionsItemFunctionRet},
kind::DefinitionsItemKind,
};
#[derive(Debug, Default, Clone)]
pub struct DefinitionsItemBuilder {
exported: bool,
kind: Option<DefinitionsItemKind>,
typ: Option<String>,
name: Option<String>,
meta: Option<String>,
value: Option<String>,
children: Vec<DefinitionsItem>,
args: Vec<DefinitionsItemFunctionArg>,
rets: Vec<DefinitionsItemFunctionRet>,
}
#[allow(dead_code)]
impl DefinitionsItemBuilder {
pub fn new() -> Self {
Self {
..Default::default()
}
}
#[allow(clippy::wrong_self_convention)]
pub fn as_exported(mut self) -> Self {
self.exported = true;
self
}
pub fn with_kind(mut self, kind: DefinitionsItemKind) -> Self {
self.kind = Some(kind);
self
}
pub fn with_name<S: AsRef<str>>(mut self, name: S) -> Self {
self.name = Some(name.as_ref().to_string());
self
}
pub fn with_type(mut self, typ: String) -> Self {
self.typ = Some(typ);
self
}
pub fn with_meta<S: AsRef<str>>(mut self, meta: S) -> Self {
self.meta = Some(meta.as_ref().to_string());
self
}
pub fn with_value<S: AsRef<str>>(mut self, value: S) -> Self {
self.value = Some(value.as_ref().to_string());
self
}
pub fn with_child(mut self, child: DefinitionsItem) -> Self {
self.children.push(child);
self
}
pub fn with_children(mut self, children: &[DefinitionsItem]) -> Self {
self.children.extend_from_slice(children);
self
}
pub fn with_arg(mut self, arg: DefinitionsItemFunctionArg) -> Self {
self.args.push(arg);
self
}
pub fn with_args(mut self, args: &[DefinitionsItemFunctionArg]) -> Self {
for arg in args {
self.args.push(arg.clone());
}
self
}
pub fn with_ret(mut self, ret: DefinitionsItemFunctionRet) -> Self {
self.rets.push(ret);
self
}
pub fn with_rets(mut self, rets: &[DefinitionsItemFunctionRet]) -> Self {
for ret in rets {
self.rets.push(ret.clone());
}
self
}
pub fn build(self) -> Result<DefinitionsItem> {
if let Some(kind) = self.kind {
let mut children = self.children;
children.sort_by(|left, right| left.name.cmp(&right.name));
Ok(DefinitionsItem {
exported: self.exported,
kind,
typ: self.typ,
name: self.name,
meta: self.meta,
value: self.value,
children,
args: self.args,
rets: self.rets,
})
} else {
bail!("Missing doc item kind")
}
}
}
impl From<&DefinitionsItem> for DefinitionsItemBuilder {
fn from(value: &DefinitionsItem) -> Self {
Self {
exported: value.exported,
kind: Some(value.kind),
typ: value.typ.clone(),
name: value.name.clone(),
meta: value.meta.clone(),
value: value.value.clone(),
children: value.children.clone(),
args: value.args.clone(),
rets: value.rets.clone(),
}
}
}


@@ -1,164 +0,0 @@
use std::cmp::Ordering;
use serde::{Deserialize, Serialize};
use super::kind::DefinitionsItemKind;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct DefinitionsItemFunctionArg {
pub name: String,
pub typedef: String,
pub typedef_simple: String,
}
impl DefinitionsItemFunctionArg {
pub fn new<N, T, TS>(name: N, typedef: T, typedef_simple: TS) -> Self
where
N: Into<String>,
T: Into<String>,
TS: Into<String>,
{
Self {
name: name.into(),
typedef: typedef.into(),
typedef_simple: typedef_simple.into(),
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct DefinitionsItemFunctionRet {
pub typedef: String,
pub typedef_simple: String,
}
impl DefinitionsItemFunctionRet {
pub fn new<T, TS>(typedef: T, typedef_simple: TS) -> Self
where
T: Into<String>,
TS: Into<String>,
{
Self {
typedef: typedef.into(),
typedef_simple: typedef_simple.into(),
}
}
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DefinitionsItem {
#[serde(skip_serializing_if = "skip_serialize_is_false")]
pub(super) exported: bool,
pub(super) kind: DefinitionsItemKind,
pub(super) typ: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(super) name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(super) meta: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(super) value: Option<String>,
#[serde(skip_serializing_if = "Vec::is_empty")]
pub(super) children: Vec<DefinitionsItem>,
#[serde(skip_serializing_if = "Vec::is_empty")]
pub(super) args: Vec<DefinitionsItemFunctionArg>,
#[serde(skip_serializing_if = "Vec::is_empty")]
pub(super) rets: Vec<DefinitionsItemFunctionRet>,
}
#[allow(clippy::trivially_copy_pass_by_ref)]
fn skip_serialize_is_false(b: &bool) -> bool {
!b
}
impl PartialOrd for DefinitionsItem {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
match self.kind.partial_cmp(&other.kind).unwrap() {
Ordering::Equal => {}
ord => return Some(ord),
}
match self.name.partial_cmp(&other.name).unwrap() {
Ordering::Equal => {}
ord => return Some(ord),
}
match (&self.value, &other.value) {
(Some(value_self), Some(value_other)) => {
match value_self.partial_cmp(value_other).unwrap() {
Ordering::Equal => {}
ord => return Some(ord),
}
}
(Some(_), None) => return Some(Ordering::Less),
(None, Some(_)) => return Some(Ordering::Greater),
(None, None) => {}
}
Some(Ordering::Equal)
}
}
#[allow(dead_code)]
impl DefinitionsItem {
pub fn is_exported(&self) -> bool {
self.exported
}
pub fn is_root(&self) -> bool {
self.kind.is_root()
}
pub fn is_type(&self) -> bool {
self.kind.is_type()
}
pub fn is_table(&self) -> bool {
self.kind.is_table()
}
pub fn is_property(&self) -> bool {
self.kind.is_property()
}
pub fn is_function(&self) -> bool {
self.kind.is_function()
}
pub fn is_description(&self) -> bool {
self.kind.is_description()
}
pub fn is_tag(&self) -> bool {
self.kind.is_tag()
}
pub fn kind(&self) -> DefinitionsItemKind {
self.kind
}
pub fn get_name(&self) -> Option<&str> {
self.name.as_deref()
}
pub fn get_type(&self) -> Option<String> {
self.typ.clone()
}
pub fn get_meta(&self) -> Option<&str> {
self.meta.as_deref()
}
pub fn get_value(&self) -> Option<&str> {
self.value.as_deref()
}
pub fn children(&self) -> &[DefinitionsItem] {
&self.children
}
pub fn args(&self) -> Vec<&DefinitionsItemFunctionArg> {
self.args.iter().collect()
}
pub fn rets(&self) -> Vec<&DefinitionsItemFunctionRet> {
self.rets.iter().collect()
}
}


@@ -1,64 +0,0 @@
use std::fmt;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub enum DefinitionsItemKind {
Root,
Type,
Table,
Property,
Function,
Description,
Tag,
}
#[allow(dead_code)]
impl DefinitionsItemKind {
pub fn is_root(self) -> bool {
self == DefinitionsItemKind::Root
}
pub fn is_type(self) -> bool {
self == DefinitionsItemKind::Type
}
pub fn is_table(self) -> bool {
self == DefinitionsItemKind::Table
}
pub fn is_property(self) -> bool {
self == DefinitionsItemKind::Property
}
pub fn is_function(self) -> bool {
self == DefinitionsItemKind::Function
}
pub fn is_description(self) -> bool {
self == DefinitionsItemKind::Description
}
pub fn is_tag(self) -> bool {
self == DefinitionsItemKind::Tag
}
}
impl fmt::Display for DefinitionsItemKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{}",
match self {
Self::Root => "Root",
Self::Type => "Type",
Self::Table => "Table",
Self::Property => "Property",
Self::Function => "Function",
Self::Description => "Description",
Self::Tag => "Tag",
}
)
}
}


@@ -1,14 +0,0 @@
mod builder;
mod item;
mod kind;
mod moonwave;
mod parser;
mod tag;
mod tree;
mod type_info_ext;
pub use builder::DefinitionsItemBuilder;
pub use item::DefinitionsItem;
pub use kind::DefinitionsItemKind;
pub use tag::DefinitionsItemTag;
pub use tree::DefinitionsTree;


@@ -1,85 +0,0 @@
use regex::Regex;
use super::{builder::DefinitionsItemBuilder, item::DefinitionsItem, kind::DefinitionsItemKind};
fn should_separate_tag_meta(tag_kind: &str) -> bool {
matches!(tag_kind.trim().to_ascii_lowercase().as_ref(), "param")
}
fn parse_moonwave_style_tag(line: &str) -> Option<DefinitionsItem> {
let tag_regex = Regex::new(r#"^@(\S+)\s*(.*)$"#).unwrap();
if tag_regex.is_match(line) {
let captures = tag_regex.captures(line).unwrap();
let tag_kind = captures.get(1).unwrap().as_str();
let tag_rest = captures.get(2).unwrap().as_str();
let mut tag_words = tag_rest.split_whitespace().collect::<Vec<_>>();
let tag_name = if !tag_words.is_empty() && should_separate_tag_meta(tag_kind) {
tag_words.remove(0).to_string()
} else {
String::new()
};
let tag_contents = tag_words.join(" ");
if tag_kind.is_empty() {
None
} else {
let mut builder = DefinitionsItemBuilder::new()
.with_kind(DefinitionsItemKind::Tag)
.with_name(tag_kind);
if !tag_name.is_empty() {
builder = builder.with_meta(tag_name);
}
if !tag_contents.is_empty() {
builder = builder.with_value(tag_contents);
}
Some(builder.build().unwrap())
}
} else {
None
}
}
pub(super) fn parse_moonwave_style_comment(comment: &str) -> Vec<DefinitionsItem> {
let no_tabs = comment.replace('\t', " ");
let lines = no_tabs.split('\n').collect::<Vec<_>>();
let indent_len =
lines.iter().fold(usize::MAX, |acc, line| {
let first = line.chars().enumerate().find_map(|(idx, ch)| {
if ch.is_whitespace() {
None
} else {
Some(idx)
}
});
if let Some(first_non_whitespace) = first {
acc.min(first_non_whitespace)
} else {
acc
}
});
let unindented_lines = lines.iter().map(|line| {
if line.chars().any(|c| !c.is_whitespace()) {
&line[indent_len..]
} else {
line
}
});
let mut doc_items = Vec::new();
let mut doc_lines = Vec::new();
for line in unindented_lines {
if let Some(tag) = parse_moonwave_style_tag(line) {
doc_items.push(tag);
} else {
doc_lines.push(line);
}
}
if !doc_lines.is_empty() {
doc_items.push(
DefinitionsItemBuilder::new()
.with_kind(DefinitionsItemKind::Description)
.with_value(doc_lines.join("\n"))
.build()
.unwrap(),
);
}
doc_items
}


@@ -1,159 +0,0 @@
use std::collections::{BTreeMap, HashMap, HashSet};
use anyhow::{Context, Result};
use full_moon::{
ast::{
types::{TypeFieldKey, TypeInfo},
Stmt,
},
tokenizer::{TokenReference, TokenType},
};
use super::{
builder::DefinitionsItemBuilder, item::DefinitionsItem, moonwave::parse_moonwave_style_comment,
type_info_ext::TypeInfoExt, DefinitionsItemKind,
};
#[derive(Debug, Clone)]
struct DefinitionsParserItem {
name: String,
comment: Option<String>,
type_info: TypeInfo,
}
#[derive(Debug, Clone)]
pub struct DefinitionsParser {
found_top_level_items: BTreeMap<String, DefinitionsParserItem>,
found_top_level_types: HashMap<String, TypeInfo>,
found_top_level_comments: HashMap<String, Option<String>>,
found_top_level_exports: Vec<String>,
}
impl DefinitionsParser {
pub fn new() -> Self {
Self {
found_top_level_items: BTreeMap::new(),
found_top_level_types: HashMap::new(),
found_top_level_comments: HashMap::new(),
found_top_level_exports: Vec::new(),
}
}
/**
Parses the given Luau type definitions into parser items.
The parser items will be stored internally and can be converted
into usable definition items using [`DefinitionsParser::drain`].
*/
pub fn parse<S>(&mut self, contents: S) -> Result<()>
where
S: AsRef<str>,
{
// Parse contents into top-level parser items for later use
let mut found_top_level_items = BTreeMap::new();
let mut found_top_level_types = HashMap::new();
let mut found_top_level_comments = HashMap::new();
let mut found_top_level_exports = HashSet::new();
let ast =
full_moon::parse(contents.as_ref()).context("Failed to parse type definitions")?;
for stmt in ast.nodes().stmts() {
if let Some((exported, declaration, token_reference)) = match stmt {
Stmt::ExportedTypeDeclaration(exp) => {
Some((true, exp.type_declaration(), exp.export_token()))
}
Stmt::TypeDeclaration(typ) => Some((false, typ, typ.type_token())),
_ => None,
} {
let name = declaration.type_name().token().to_string();
let comment = find_token_moonwave_comment(token_reference);
found_top_level_items.insert(
name.clone(),
DefinitionsParserItem {
name: name.clone(),
comment: comment.clone(),
type_info: declaration.type_definition().clone(),
},
);
found_top_level_types.insert(name.clone(), declaration.type_definition().clone());
found_top_level_comments.insert(name.clone(), comment);
if exported {
found_top_level_exports.insert(name);
}
}
}
// Store results
self.found_top_level_items = found_top_level_items;
self.found_top_level_types = found_top_level_types;
self.found_top_level_comments = found_top_level_comments;
self.found_top_level_exports = found_top_level_exports.into_iter().collect();
Ok(())
}
fn convert_parser_item_into_doc_item(
&self,
item: DefinitionsParserItem,
kind: Option<DefinitionsItemKind>,
) -> DefinitionsItem {
let mut builder = DefinitionsItemBuilder::new()
.with_kind(kind.unwrap_or_else(|| item.type_info.parse_definitions_kind()))
.with_name(&item.name)
.with_type(item.type_info.to_string());
if self.found_top_level_exports.contains(&item.name) {
builder = builder.as_exported();
}
if let Some(comment) = item.comment {
builder = builder.with_children(&parse_moonwave_style_comment(&comment));
}
if let Some(args) = item
.type_info
.extract_args_normalized(&self.found_top_level_types)
{
builder = builder.with_args(&args);
}
if let TypeInfo::Table { fields, .. } = item.type_info {
for field in fields.iter() {
if let TypeFieldKey::Name(name) = field.key() {
builder = builder.with_child(self.convert_parser_item_into_doc_item(
DefinitionsParserItem {
name: name.token().to_string(),
comment: find_token_moonwave_comment(name),
type_info: field.value().clone(),
},
None,
));
}
}
}
builder.build().unwrap()
}
/**
Converts currently stored parser items into definition items.
This will consume (drain) all stored parser items, leaving the parser empty.
*/
#[allow(clippy::unnecessary_wraps)]
pub fn drain(&mut self) -> Result<Vec<DefinitionsItem>> {
let mut resulting_items = Vec::new();
for top_level_item in self.found_top_level_items.values() {
resulting_items
.push(self.convert_parser_item_into_doc_item(top_level_item.clone(), None));
}
self.found_top_level_items = BTreeMap::new();
self.found_top_level_types = HashMap::new();
self.found_top_level_comments = HashMap::new();
self.found_top_level_exports = Vec::new();
Ok(resulting_items)
}
}
fn find_token_moonwave_comment(token: &TokenReference) -> Option<String> {
token
.leading_trivia()
.filter_map(|trivia| match trivia.token_type() {
TokenType::MultiLineComment { blocks, comment } if blocks == &1 => Some(comment),
_ => None,
})
.last()
.map(ToString::to_string)
}


@@ -1,102 +0,0 @@
use anyhow::{bail, Context, Result};
use serde::{Deserialize, Serialize};
use super::item::DefinitionsItem;
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub enum DefinitionsItemTag {
Class(String),
Type(String),
Within(String),
Param((String, String)),
Return(String),
MustUse,
ReadOnly,
ReadWrite,
}
#[allow(dead_code)]
impl DefinitionsItemTag {
pub fn is_class(&self) -> bool {
matches!(self, Self::Class(_))
}
pub fn is_type(&self) -> bool {
matches!(self, Self::Class(_))
}
pub fn is_within(&self) -> bool {
matches!(self, Self::Within(_))
}
pub fn is_param(&self) -> bool {
matches!(self, Self::Param(_))
}
pub fn is_return(&self) -> bool {
matches!(self, Self::Return(_))
}
pub fn is_must_use(&self) -> bool {
self == &Self::MustUse
}
pub fn is_read_only(&self) -> bool {
self == &Self::ReadOnly
}
pub fn is_read_write(&self) -> bool {
self == &Self::ReadWrite
}
}
impl TryFrom<&DefinitionsItem> for DefinitionsItemTag {
type Error = anyhow::Error;
fn try_from(value: &DefinitionsItem) -> Result<Self> {
if let Some(name) = value.get_name() {
Ok(match name.trim().to_ascii_lowercase().as_ref() {
"class" => Self::Class(
value
.get_value()
.context("Missing class name for class tag")?
.to_string(),
),
"type" => Self::Class(
value
.get_value()
.context("Missing type name for type tag")?
.to_string(),
),
"within" => Self::Within(
value
.get_value()
.context("Missing class name for within tag")?
.to_string(),
),
"param" => Self::Param((
value
.get_meta()
.context("Missing param name for param tag")?
.to_string(),
value
.get_value()
.context("Missing param value for param tag")?
.to_string(),
)),
"return" => Self::Return(
value
.get_value()
.context("Missing description for return tag")?
.to_string(),
),
"must_use" => Self::MustUse,
"read_only" => Self::ReadOnly,
"read_write" => Self::ReadWrite,
s => bail!("Unknown docs tag: '{}'", s),
})
} else {
bail!("Doc item has no name")
}
}
}


@@ -1,53 +0,0 @@
use std::ops::{Deref, DerefMut};
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use super::{
builder::DefinitionsItemBuilder, item::DefinitionsItem, kind::DefinitionsItemKind,
parser::DefinitionsParser,
};
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct DefinitionsTree(DefinitionsItem);
#[allow(dead_code)]
impl DefinitionsTree {
pub fn from_type_definitions<S: AsRef<str>>(type_definitions_contents: S) -> Result<Self> {
let mut parser = DefinitionsParser::new();
parser
.parse(type_definitions_contents)
.context("Failed to parse type definitions AST")?;
let top_level_definition_items = parser
.drain()
.context("Failed to convert parser items into definition items")?;
let root = DefinitionsItemBuilder::new()
.with_kind(DefinitionsItemKind::Root)
.with_name("<<<ROOT>>>")
.with_children(&top_level_definition_items)
.build()?;
Ok(Self(root))
}
#[allow(clippy::unused_self)]
pub fn is_root(&self) -> bool {
true
}
pub fn into_inner(self) -> DefinitionsItem {
self.0
}
}
impl Deref for DefinitionsTree {
type Target = DefinitionsItem;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for DefinitionsTree {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}


@@ -1,333 +0,0 @@
use std::collections::HashMap;
use full_moon::{
ast::types::{TypeArgument, TypeInfo},
tokenizer::{Symbol, Token, TokenReference, TokenType},
ShortString,
};
use super::{
item::{DefinitionsItemFunctionArg, DefinitionsItemFunctionRet},
kind::DefinitionsItemKind,
};
pub(crate) trait TypeInfoExt {
fn is_fn(&self) -> bool;
fn parse_definitions_kind(&self) -> DefinitionsItemKind;
fn stringify_simple(
&self,
parent_typ: Option<&TypeInfo>,
type_lookup_table: &HashMap<String, TypeInfo>,
) -> String;
fn extract_args(&self) -> Vec<TypeArgument>;
fn extract_args_normalized(
&self,
type_lookup_table: &HashMap<String, TypeInfo>,
) -> Option<Vec<DefinitionsItemFunctionArg>>;
// fn extract_rets(&self) -> Vec<TypeArgument>;
// fn extract_rets_normalized(
// &self,
// type_lookup_table: &HashMap<String, TypeInfo>,
// ) -> Option<Vec<DefinitionsItemFunctionRet>>;
}
impl TypeInfoExt for TypeInfo {
/**
Checks if this type represents a function or not.
If the type is a tuple, union, or intersection, it will be checked recursively.
*/
fn is_fn(&self) -> bool {
match self {
TypeInfo::Callback { .. } => true,
TypeInfo::Tuple { types, .. } => types.iter().all(Self::is_fn),
TypeInfo::Union { left, right, .. } | TypeInfo::Intersection { left, right, .. } => {
left.is_fn() || right.is_fn()
}
_ => false,
}
}
/**
Parses the definitions item kind from the type.
If the type is a tuple, union, or intersection, all the inner types
are required to be equivalent in terms of definitions item kinds.
*/
fn parse_definitions_kind(&self) -> DefinitionsItemKind {
match self {
TypeInfo::Array { .. } | TypeInfo::Table { .. } => DefinitionsItemKind::Table,
TypeInfo::Basic(_) | TypeInfo::String(_) => DefinitionsItemKind::Property,
TypeInfo::Optional { base, .. } => Self::parse_definitions_kind(base.as_ref()),
TypeInfo::Tuple { types, .. } => {
let mut kinds = types
.iter()
.map(Self::parse_definitions_kind)
.collect::<Vec<_>>();
let kinds_all_the_same = kinds.windows(2).all(|w| w[0] == w[1]);
if kinds_all_the_same && !kinds.is_empty() {
kinds.pop().unwrap()
} else {
unimplemented!(
"Missing support for tuple with differing types in type definitions parser",
)
}
}
TypeInfo::Union { left, right, .. } | TypeInfo::Intersection { left, right, .. } => {
let kind_left = Self::parse_definitions_kind(left.as_ref());
let kind_right = Self::parse_definitions_kind(right.as_ref());
if kind_left == kind_right {
kind_left
} else {
unimplemented!(
"Missing support for union/intersection with differing types in type definitions parser",
)
}
}
typ if typ.is_fn() => DefinitionsItemKind::Function,
typ => unimplemented!(
"Missing support for TypeInfo in type definitions parser:\n{}",
typ.to_string()
),
}
}
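    // Illustrative note (not in the original source): `string | number`
    // resolves to Property since both sides agree, while mixing, say, a
    // function type with a string would hit the unimplemented! branch above.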
/**
Stringifies the type into a simplified type string.
        The simplified type string will match one of the following formats:
* `any`
* `boolean`
* `string`
* `function`
* `table`
* `CustomTypeName`
* `TypeName?`
* `TypeName | OtherTypeName`
* `{ TypeName }`
* `"string-literal"`
*/
fn stringify_simple(
&self,
parent_typ: Option<&TypeInfo>,
type_lookup_table: &HashMap<String, TypeInfo>,
) -> String {
match self {
TypeInfo::Array { type_info, .. } => {
format!(
"{{ {} }}",
type_info
.as_ref()
.stringify_simple(Some(self), type_lookup_table)
)
}
TypeInfo::Basic(tok) => {
let tok_str = tok.token().to_string().trim().to_string();
let mut any_str = None;
                // If the function that contains this arg has generics, and one
                // of those generics matches this token, we stringify it as `any`
if let Some(parent) = parent_typ {
if let Some(TypeInfo::Callback {
generics: Some(callback_generics),
..
}) = try_extract_callback_type_info(parent)
{
if callback_generics
.generics()
.iter()
.any(|g| g.to_string() == tok_str)
{
any_str = Some("any".to_string());
}
}
}
// Also check if we got a referenced type, meaning that it
// exists in the lookup table of global types passed to us
if let Some(any_str) = any_str {
any_str
} else if let Some(referenced_typ) = type_lookup_table.get(&tok_str) {
referenced_typ.stringify_simple(None, type_lookup_table)
} else {
tok_str
}
}
TypeInfo::String(str) => str.token().to_string(),
TypeInfo::Boolean(_) => "boolean".to_string(),
TypeInfo::Callback { .. } => "function".to_string(),
TypeInfo::Optional { base, .. } => {
format!(
"{}?",
base.as_ref()
.stringify_simple(Some(self), type_lookup_table)
)
}
TypeInfo::Table { .. } => "table".to_string(),
TypeInfo::Union { left, right, .. } => {
format!(
"{} {} {}",
left.as_ref()
.stringify_simple(Some(self), type_lookup_table),
Symbol::Pipe,
right
.as_ref()
.stringify_simple(Some(self), type_lookup_table)
)
}
// FUTURE: Is there any other type that we can
// stringify to a primitive in an obvious way?
_ => "...".to_string(),
}
}
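    // Illustrative note (not in the original source): typical simplified
    // outputs are `{ string }` for an array, `number?` for an optional,
    // `string | number` for a union, and `any` for an unresolved generic.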
fn extract_args(&self) -> Vec<TypeArgument> {
if self.is_fn() {
match self {
TypeInfo::Callback { arguments, .. } => {
arguments.iter().cloned().collect::<Vec<_>>()
}
TypeInfo::Tuple { types, .. } => types
.iter()
.next()
.expect("Function tuple type was empty")
.extract_args(),
TypeInfo::Union { left, right, .. }
| TypeInfo::Intersection { left, right, .. } => {
let mut result = Vec::new();
result = merge_type_argument_vecs(result, left.extract_args());
result = merge_type_argument_vecs(result, right.extract_args());
result
}
_ => vec![],
}
} else {
vec![]
}
}
fn extract_args_normalized(
&self,
type_lookup_table: &HashMap<String, TypeInfo>,
) -> Option<Vec<DefinitionsItemFunctionArg>> {
if self.is_fn() {
let args_stringified_not_normalized = self
.extract_args()
.iter()
.map(|type_arg| {
(
type_arg
.name()
.map_or_else(|| "_".to_string(), |n| n.0.to_string()),
type_arg.type_info().to_string(),
type_arg
.type_info()
.stringify_simple(Some(self), type_lookup_table),
)
})
.collect::<Vec<_>>();
let mut args = Vec::new();
for (arg_name, arg_typedef, arg_typedef_simplified) in args_stringified_not_normalized {
args.push(DefinitionsItemFunctionArg::new(
arg_name,
arg_typedef,
normalize_type(&arg_typedef_simplified),
));
}
Some(args)
} else {
None
}
}
}
fn try_extract_callback_type_info(type_info: &TypeInfo) -> Option<&TypeInfo> {
match type_info {
TypeInfo::Callback { .. } => Some(type_info),
TypeInfo::Tuple { types, .. } => types.iter().find_map(try_extract_callback_type_info),
TypeInfo::Union { left, right, .. } | TypeInfo::Intersection { left, right, .. } => {
try_extract_callback_type_info(left).or_else(|| try_extract_callback_type_info(right))
}
_ => None,
}
}
fn make_empty_type_argument() -> TypeArgument {
TypeArgument::new(TypeInfo::Basic(TokenReference::new(
vec![],
Token::new(TokenType::Symbol {
symbol: Symbol::Nil,
}),
vec![],
)))
}
fn merge_type_arguments(left: TypeArgument, right: TypeArgument) -> TypeArgument {
TypeArgument::new(TypeInfo::Union {
left: Box::new(left.type_info().clone()),
pipe: TokenReference::new(
vec![Token::new(TokenType::Whitespace {
characters: ShortString::new(" "),
})],
Token::new(TokenType::Symbol {
symbol: Symbol::Pipe,
}),
vec![Token::new(TokenType::Whitespace {
characters: ShortString::new(" "),
})],
),
right: Box::new(right.type_info().clone()),
})
}
fn merge_type_argument_vecs(
existing: Vec<TypeArgument>,
new: Vec<TypeArgument>,
) -> Vec<TypeArgument> {
let mut result = Vec::new();
for (index, argument) in new.iter().enumerate() {
if let Some(existing) = existing.get(index) {
result.push(merge_type_arguments(existing.clone(), argument.clone()));
} else {
result.push(merge_type_arguments(
make_empty_type_argument(),
argument.clone(),
));
}
}
result
}
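// Note, added for illustration: when the existing argument list is shorter
// than the new one, the missing positions are treated as `nil` (via
// make_empty_type_argument) before being unioned with the new argument.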
fn normalize_type(simplified: &str) -> String {
let separator = format!(" {} ", Symbol::Pipe);
let arg_parts = simplified.split(&separator).collect::<Vec<_>>();
    // Check if any arg is optional; if so, the entire merged /
    // normalized union of args will be treated as optional
let is_optional = arg_parts
.iter()
.any(|part| part == &"nil" || part.ends_with('?'));
// Get rid of any nils or optional markers since we keep track of it above
let mut arg_parts_no_nils = arg_parts
.iter()
.filter_map(|arg_part| {
if arg_part == &"nil" {
None
} else {
Some(arg_part.trim_end_matches('?'))
}
})
.collect::<Vec<_>>();
arg_parts_no_nils.sort_unstable(); // Sort the args to be able to dedup
arg_parts_no_nils.dedup(); // Deduplicate types that are the exact same shape
if is_optional {
if arg_parts_no_nils.len() > 1 {
            // A union of args that is nillable should be enclosed in parens to make it
            // clear that the entire arg is nillable, not just the last type
format!("({})?", arg_parts_no_nils.join(&separator))
} else {
// Just one nillable arg, does not need any parens
format!("{}?", arg_parts_no_nils.first().unwrap())
}
} else {
arg_parts_no_nils.join(&separator)
}
}
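// Hypothetical test sketch, added for illustration only (not part of the
// original file), showing the expected behaviour of `normalize_type`:
#[cfg(test)]
mod normalize_type_sketch {
    use super::normalize_type;

    #[test]
    fn merges_and_marks_optional() {
        // A duplicated type with a nil alternative collapses into one optional
        assert_eq!(normalize_type("string | nil | string"), "string?");
        // A union containing nil is wrapped in parens so the whole union reads as optional
        assert_eq!(normalize_type("number | string | nil"), "(number | string)?");
        // Without nil or a trailing `?`, parts are only sorted and deduplicated
        assert_eq!(normalize_type("string | number"), "number | string");
    }
}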

View file

@ -1,240 +0,0 @@
use std::{collections::HashMap, fmt::Write, path::PathBuf};
use anyhow::{Context, Result};
use futures_util::future::try_join_all;
use tokio::fs::{create_dir_all, write};
use super::definitions::{
DefinitionsItem, DefinitionsItemBuilder, DefinitionsItemKind, DefinitionsItemTag,
DefinitionsTree,
};
const GENERATED_COMMENT_TAG: &str = "<!-- @generated with lune-cli -->";
#[allow(clippy::too_many_lines)]
pub async fn generate_from_type_definitions(
definitions: HashMap<String, DefinitionsTree>,
) -> Result<()> {
let mut dirs_to_write = Vec::new();
let mut files_to_write = Vec::new();
// Create the gitbook dir at the repo root
let path_root = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("../../")
.canonicalize()
.unwrap();
let path_gitbook_dir = path_root.join("gitbook");
let path_gitbook_docs_dir = path_gitbook_dir.join("docs");
let path_gitbook_pages_dir = path_gitbook_docs_dir.join("pages");
let path_gitbook_api_dir = path_gitbook_pages_dir.join("api");
dirs_to_write.push(path_gitbook_dir.clone());
dirs_to_write.push(path_gitbook_docs_dir.clone());
dirs_to_write.push(path_gitbook_pages_dir.clone());
dirs_to_write.push(path_gitbook_api_dir.clone());
// Convert definition trees into single root items so that we can parse and write markdown recursively
let mut typedef_items = HashMap::new();
for (typedef_name, typedef_contents) in definitions {
let main = typedef_contents
.children()
.iter()
.find(
|c| matches!(c.get_name(), Some(s) if s.to_lowercase() == typedef_name.to_lowercase()),
)
.expect("Failed to find main export for generating typedef file");
let children = typedef_contents
.children()
.iter()
.filter_map(|child| {
if child == main {
None
} else {
Some(
DefinitionsItemBuilder::from(child)
.with_kind(DefinitionsItemKind::Type)
.build()
.unwrap(),
)
}
})
.collect::<Vec<_>>();
let root = DefinitionsItemBuilder::new()
.with_kind(main.kind())
.with_name(main.get_name().unwrap())
.with_children(main.children())
.with_children(&children);
let root_item = root.build().expect("Failed to build root definitions item");
typedef_items.insert(typedef_name.to_string(), root_item);
}
// Generate files for all subcategories
for (category_name, category_item) in typedef_items {
let path = path_gitbook_api_dir
.join(category_name.to_ascii_lowercase())
.with_extension("md");
let mut contents = String::new();
write!(contents, "{GENERATED_COMMENT_TAG}\n\n")?;
generate_markdown_documentation(&mut contents, &category_item, None, 0)?;
files_to_write.push((path, post_process_docs(contents)));
}
// Write all dirs and files only when we know generation was successful
let futs_dirs = dirs_to_write
.drain(..)
.map(create_dir_all)
.collect::<Vec<_>>();
let futs_files = files_to_write
.drain(..)
.map(|(path, contents)| write(path, contents))
.collect::<Vec<_>>();
try_join_all(futs_dirs).await?;
try_join_all(futs_files).await?;
Ok(())
}
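// Note, added for illustration: the function above emits one markdown file per
// built-in at gitbook/docs/pages/api/<name>.md, and only creates directories
// and writes files once every page has been generated successfully.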
fn get_name(item: &DefinitionsItem) -> Result<String> {
item.children()
.iter()
.find_map(|child| {
if child.is_tag() {
if let Ok(DefinitionsItemTag::Class(c)) = DefinitionsItemTag::try_from(child) {
Some(c)
} else {
None
}
} else {
None
}
})
.or_else(|| item.get_name().map(ToString::to_string))
.context("Definitions item is missing a name")
}
#[allow(clippy::too_many_lines)]
fn generate_markdown_documentation(
contents: &mut String,
item: &DefinitionsItem,
parent: Option<&DefinitionsItem>,
depth: usize,
) -> Result<()> {
match item.kind() {
DefinitionsItemKind::Type
| DefinitionsItemKind::Table
| DefinitionsItemKind::Property
| DefinitionsItemKind::Function => {
write!(
contents,
"\n{} {}\n",
if item.is_table() { "#" } else { "###" },
get_name(item)?
)?;
}
DefinitionsItemKind::Description => {
let desc = item.get_value().context("Description is missing a value")?;
write!(
contents,
"\n{}\n",
if depth >= 2 {
// HACK: We know our typedefs are formatted like this and
// it looks nicer to have this bolding instead of two
// headers using "###" in the function definition
desc.replace("### Example usage", "**Example usage:**")
} else {
desc.to_string()
}
)?;
}
_ => {}
}
if item.is_function() && !item.args().is_empty() {
let args = item
.args()
.iter()
.map(|arg| format!("{}: {}", arg.name.trim(), arg.typedef.trim()))
.collect::<Vec<_>>()
.join(", ")
.replace("_: T...", "T...");
let func_name = item.get_name().unwrap_or("_");
let parent_name = parent.unwrap().get_name().unwrap_or("_");
let parent_pre = if parent_name.to_lowercase() == "uncategorized" {
String::new()
} else {
format!("{parent_name}.")
};
write!(
contents,
"\n```lua\nfunction {parent_pre}{func_name}({args})\n```\n",
)?;
} else if item.is_type() {
write!(
contents,
"\n```lua\ntype {} = {}\n```\n",
item.get_name().unwrap_or("_"),
item.get_type().unwrap_or_else(|| "{}".to_string()).trim()
)?;
}
let descriptions = item
.children()
.iter()
.filter(|child| child.is_description())
.collect::<Vec<_>>();
let properties = item
.children()
.iter()
.filter(|child| child.is_property())
.collect::<Vec<_>>();
let functions = item
.children()
.iter()
.filter(|child| child.is_function())
.collect::<Vec<_>>();
let types = item
.children()
.iter()
.filter(|child| child.is_type())
.collect::<Vec<_>>();
for description in descriptions {
generate_markdown_documentation(contents, description, Some(item), depth + 1)?;
}
if !item.is_type() {
if !properties.is_empty() {
write!(contents, "\n\n---\n\n## Properties\n\n")?;
}
for property in properties {
generate_markdown_documentation(contents, property, Some(item), depth + 1)?;
}
if !functions.is_empty() {
write!(contents, "\n\n---\n\n## Functions\n\n")?;
}
for function in functions {
generate_markdown_documentation(contents, function, Some(item), depth + 1)?;
}
if !types.is_empty() {
write!(contents, "\n\n---\n\n## Types\n\n")?;
}
for typ in types {
generate_markdown_documentation(contents, typ, Some(item), depth + 1)?;
}
}
Ok(())
}
fn post_process_docs(contents: String) -> String {
let no_empty_lines = contents
.lines()
.map(|line| {
if line.chars().all(char::is_whitespace) {
""
} else {
line
}
})
.collect::<Vec<_>>()
.join("\n");
no_empty_lines
.replace("\n\n---", "\n---")
.replace("\n\n\n", "\n\n")
.replace("\n\n\n", "\n\n")
}
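// Note, added for illustration: this pass blanks out whitespace-only lines,
// drops the empty line that would otherwise precede a `---` rule, and
// collapses runs of blank lines left behind by the generator.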

View file

@ -1,54 +0,0 @@
use std::collections::HashMap;
use anyhow::Result;
use include_dir::Dir;
use self::definitions::DefinitionsTree;
mod gitbook_dir;
mod typedef_files;
pub mod definitions;
pub async fn generate_gitbook_dir_from_definitions(dir: &Dir<'_>) -> Result<()> {
let definitions = read_typedefs_dir(dir)?;
gitbook_dir::generate_from_type_definitions(definitions).await
}
pub async fn generate_typedef_files_from_definitions(dir: &Dir<'_>) -> Result<String> {
let contents = read_typedefs_dir_contents(dir);
typedef_files::generate_from_type_definitions(contents).await
}
fn read_typedefs_dir_contents(dir: &Dir<'_>) -> HashMap<String, Vec<u8>> {
let mut definitions = HashMap::new();
for entry in dir.find("*.luau").unwrap() {
let entry_file = entry.as_file().unwrap();
let entry_name = entry_file.path().file_name().unwrap().to_string_lossy();
let typedef_name = entry_name.trim_end_matches(".luau");
let typedef_contents = entry_file.contents().to_vec();
definitions.insert(typedef_name.to_string(), typedef_contents);
}
definitions
}
fn read_typedefs_dir(dir: &Dir<'_>) -> Result<HashMap<String, DefinitionsTree>> {
let mut definitions = HashMap::new();
for entry in dir.find("*.luau").unwrap() {
let entry_file = entry.as_file().unwrap();
let entry_name = entry_file.path().file_name().unwrap().to_string_lossy();
let typedef_name = entry_name.trim_end_matches(".luau");
let typedef_contents = entry_file.contents_utf8().unwrap().to_string();
let typedef_tree = DefinitionsTree::from_type_definitions(&typedef_contents)?;
definitions.insert(typedef_name.to_string(), typedef_tree);
}
Ok(definitions)
}

View file

@ -10,7 +10,7 @@ use serde_json::Value as JsonValue;
use super::gen::generate_typedef_files_from_definitions;
pub(crate) static TYPEDEFS_DIR: Dir<'_> = include_dir!("docs/typedefs");
pub(crate) static TYPEDEFS_DIR: Dir<'_> = include_dir!("types");
pub(crate) static SETTING_NAME_MODE: &str = "luau-lsp.require.mode";
pub(crate) static SETTING_NAME_ALIASES: &str = "luau-lsp.require.directoryAliases";