First draft for new setup & typedefs generation

Filip Tibell 2023-05-08 13:13:34 +02:00
parent fbffda1e65
commit 66aa1fc9ea
20 changed files with 330 additions and 763 deletions

.vscode/settings.json (vendored, 15 lines changed)

@@ -26,20 +26,5 @@
},
"[rust]": {
"editor.defaultFormatter": "rust-lang.rust-analyzer"
},
// Ignore temp / gitignored files while editing for a more focused
// workspace, this can be deleted or overridden safely if needed
"files.exclude": {
// Defaults
"**/.git": true,
"**/.svn": true,
"**/.hg": true,
"**/CVS": true,
"**/.DS_Store": true,
"**/Thumbs.db": true,
// Autogenerate dirs
"bin": true,
"target": true,
"gitbook": true
}
}

Cargo.lock (generated, 18 lines changed)

@@ -468,9 +468,9 @@ dependencies = [
[[package]]
name = "directories"
version = "4.0.1"
version = "5.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f51c5d4ddabd36886dd3e1438cb358cdcb0d7c499cb99cb4ac2e38e18b5cb210"
checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35"
dependencies = [
"dirs-sys",
]
@@ -488,13 +488,14 @@ dependencies = [
[[package]]
name = "dirs-sys"
version = "0.3.7"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
dependencies = [
"libc",
"option-ext",
"redox_users 0.4.3",
"winapi",
"windows-sys 0.48.0",
]
[[package]]
@@ -1096,6 +1097,7 @@ dependencies = [
"anyhow",
"clap 4.2.7",
"console",
"directories",
"env_logger 0.10.0",
"full_moon",
"futures-util",
@@ -1213,6 +1215,12 @@ version = "1.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
[[package]]
name = "option-ext"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "os_str_bytes"
version = "6.5.0"

Cargo.toml

@@ -18,6 +18,7 @@ categories = ["command-line-interface"]
# These are declared here to ensure consistent versioning
[workspace.dependencies]
console = "0.15"
directories = "5.0"
futures-util = "0.3"
once_cell = "1.17"


@@ -1,5 +1,5 @@
--[=[
@type FsWriteOptions
@type WriteOptions
@within FS
Options for filesystem APIs that write to files and/or directories.
@@ -8,7 +8,7 @@
* `overwrite` - If the target path should be overwritten or not, in the case that it already exists
]=]
export type FsWriteOptions = {
export type WriteOptions = {
overwrite: boolean?,
}
@@ -163,7 +163,7 @@ export type FS = {
Throws an error if a file or directory already exists at the target path.
This can be bypassed by passing `true` as the third argument, or a dictionary of options.
Refer to the documentation for `FsWriteOptions` for specific option keys and their values.
Refer to the documentation for `WriteOptions` for specific option keys and their values.
An error will be thrown in the following situations:
@@ -175,5 +175,5 @@ export type FS = {
@param to The path to move to
@param overwriteOrOptions Options for the target path, such as whether it should be overwritten if it already exists
]=]
move: (from: string, to: string, overwriteOrOptions: (boolean | FsWriteOptions)?) -> (),
move: (from: string, to: string, overwriteOrOptions: (boolean | WriteOptions)?) -> (),
}


@@ -1,28 +0,0 @@
--[=[
Prints given value(s) to stdout.
This will format and prettify values such as tables, numbers, booleans, and more.
]=]
export type print = <T...>(T...) -> ()
--[=[
Prints given value(s) to stdout with a leading `[INFO]` tag.
This will format and prettify values such as tables, numbers, booleans, and more.
]=]
export type printinfo = <T...>(T...) -> ()
--[=[
Prints given value(s) to stdout with a leading `[WARN]` tag.
This will format and prettify values such as tables, numbers, booleans, and more.
]=]
export type warn = <T...>(T...) -> ()
--[=[
Throws an error and prints a formatted version of it with a leading `[ERROR]` tag.
@param message The error message to throw
@param level The stack level to throw the error at, defaults to 0
]=]
export type error = <T>(message: T, level: number?) -> ()


@@ -1,7 +1,7 @@
type NetMethod = "GET" | "POST" | "PUT" | "DELETE" | "HEAD" | "OPTIONS" | "PATCH"
export type HttpMethod = "GET" | "POST" | "PUT" | "DELETE" | "HEAD" | "OPTIONS" | "PATCH"
--[=[
@type NetFetchParams
@type FetchParams
@within Net
Parameters for sending network requests with `net.request`.
@@ -14,16 +14,16 @@ type NetMethod = "GET" | "POST" | "PUT" | "DELETE" | "HEAD" | "OPTIONS" | "PATCH"
* `headers` - A table of key-value pairs representing headers
* `body` - The request body
]=]
export type NetFetchParams = {
export type FetchParams = {
url: string,
method: NetMethod?,
method: HttpMethod?,
query: { [string]: string }?,
headers: { [string]: string }?,
body: string?,
}
--[=[
@type NetFetchResponse
@type FetchResponse
@within Net
Response type for sending network requests with `net.request`.
@@ -36,7 +36,7 @@ export type NetFetchParams = {
* `headers` - A table of key-value pairs representing headers
* `body` - The request body, or an empty string if one was not given
]=]
export type NetFetchResponse = {
export type FetchResponse = {
ok: boolean,
statusCode: number,
statusMessage: string,
@@ -45,7 +45,7 @@ export type NetFetchResponse = {
}
--[=[
@type NetRequest
@type ServeRequest
@within Net
Data type for requests in `net.serve`.
@@ -58,16 +58,16 @@ export type NetFetchResponse = {
* `headers` - A table of key-value pairs representing headers
* `body` - The request body, or an empty string if one was not given
]=]
export type NetRequest = {
export type ServeRequest = {
path: string,
query: { [string]: string? },
method: NetMethod,
method: HttpMethod,
headers: { [string]: string },
body: string,
}
--[=[
@type NetRequest
@type ServeResponse
@within Net
Response type for requests in `net.serve`.
@@ -78,17 +78,17 @@ export type NetRequest = {
* `headers` - A table of key-value pairs representing headers
* `body` - The response body
]=]
export type NetResponse = {
export type ServeResponse = {
status: number?,
headers: { [string]: string }?,
body: string?,
}
type NetServeHttpHandler = (request: NetRequest) -> string | NetResponse
type NetServeWebSocketHandler = (socket: NetWebSocket) -> ()
type ServeHttpHandler = (request: ServeRequest) -> string | ServeResponse
type ServeWebSocketHandler = (socket: WebSocket) -> ()
--[=[
@type NetServeConfig
@type ServeConfig
@within Net
Configuration for `net.serve`.
@@ -96,25 +96,25 @@ type NetServeWebSocketHandler = (socket: NetWebSocket) -> ()
This may contain one of, or both of the following callbacks:
* `handleRequest` for handling normal http requests, equivalent to just passing a function to `net.serve`
* `handleWebSocket` for handling web socket requests, which will receive a `NetWebSocket` object as its first and only parameter
* `handleWebSocket` for handling web socket requests, which will receive a `WebSocket` object as its first and only parameter
]=]
export type NetServeConfig = {
handleRequest: NetServeHttpHandler?,
handleWebSocket: NetServeWebSocketHandler?,
export type ServeConfig = {
handleRequest: ServeHttpHandler?,
handleWebSocket: ServeWebSocketHandler?,
}
--[=[
@type NetServeHandle
@type ServeHandle
@within Net
A handle to a currently running web server, containing a single `stop` function to gracefully shut down the web server.
]=]
export type NetServeHandle = {
export type ServeHandle = {
stop: () -> (),
}
--[=[
@type NetWebSocket
@type WebSocket
@within Net
A reference to a web socket connection.
@@ -135,7 +135,7 @@ export type NetServeHandle = {
code according to the [WebSocket specification](https://www.iana.org/assignments/websocket/websocket.xhtml).
This will be an integer between 1000 and 4999, where 1000 is the canonical code for normal, error-free closure.
]=]
export type NetWebSocket = {
export type WebSocket = {
closeCode: number?,
close: (code: number?) -> (),
send: (message: string, asBinaryMessage: boolean?) -> (),
@@ -191,7 +191,7 @@ export type Net = {
@param config The URL or request config to use
@return A dictionary representing the response for the request
]=]
request: (config: string | NetFetchParams) -> NetFetchResponse,
request: (config: string | FetchParams) -> FetchResponse,
--[=[
@within Net
@must_use
@@ -204,19 +204,19 @@ export type Net = {
@param url The URL to connect to
@return A web socket handle
]=]
socket: (url: string) -> NetWebSocket,
socket: (url: string) -> WebSocket,
--[=[
@within Net
Creates an HTTP server that listens on the given `port`.
This will ***not*** block and will keep listening for requests on the given `port`
until the `stop` function on the returned `NetServeHandle` has been called.
until the `stop` function on the returned `ServeHandle` has been called.
@param port The port to use for the server
@param handlerOrConfig The handler function or config to use for the server
]=]
serve: (port: number, handlerOrConfig: NetServeHttpHandler | NetServeConfig) -> NetServeHandle,
serve: (port: number, handlerOrConfig: ServeHttpHandler | ServeConfig) -> ServeHandle,
--[=[
@within Net
@must_use
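To see the renamed net types working together, here is a minimal sketch of a script using them (the URL, port, and payloads are placeholders; `serde` comes from this same typedefs set):

```luau
-- net.request takes a FetchParams table and returns a FetchResponse
local response = net.request({
	url = "https://example.com/api",
	method = "POST",
	headers = { ["content-type"] = "application/json" },
	body = serde.encode("json", { hello = "world" }),
})
if response.ok then
	print(response.statusCode, response.body)
end

-- net.serve takes a ServeConfig (or a bare handler) and returns a ServeHandle
local handle = net.serve(8080, {
	handleRequest = function(request)
		-- request is a ServeRequest; a returned string becomes the response body
		return "Requested path: " .. request.path
	end,
	handleWebSocket = function(socket)
		-- socket is a WebSocket
		socket.send("hello from the server")
		socket.close()
	end,
})

-- Gracefully shut the server down when done
handle.stop()
```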


@@ -1,7 +1,10 @@
type ProcessSpawnOptionsStdio = "inherit" | "default"
export type OS = "linux" | "macos" | "windows"
export type Arch = "x86_64" | "aarch64"
export type SpawnOptionsStdio = "inherit" | "default"
--[=[
@type ProcessSpawnOptions
@type SpawnOptions
@within Process
A dictionary of options for `process.spawn`, with the following available values:
@@ -11,15 +14,15 @@ type ProcessSpawnOptionsStdio = "inherit" | "default"
* `shell` - Whether to run in a shell or not - set to `true` to run using the default shell, or a string to run using a specific shell
* `stdio` - How to treat output and error streams from the child process - set to "inherit" to pass output and error streams to the current process
]=]
export type ProcessSpawnOptions = {
export type SpawnOptions = {
cwd: string?,
env: { [string]: string }?,
shell: (boolean | string)?,
stdio: ProcessSpawnOptionsStdio?,
stdio: SpawnOptionsStdio?,
}
--[=[
@type ProcessSpawnResult
@type SpawnResult
@within Process
Result type for child processes in `process.spawn`.
@@ -31,7 +34,7 @@ export type ProcessSpawnOptions = {
* `stdout` - The full contents written to stdout by the child process, or an empty string if nothing was written
* `stderr` - The full contents written to stderr by the child process, or an empty string if nothing was written
]=]
export type ProcessSpawnResult = {
export type SpawnResult = {
ok: boolean,
code: number,
stdout: string,
@@ -88,7 +91,7 @@ export type Process = {
* `"macos"`
* `"windows"`
]=]
os: "linux" | "macos" | "windows",
os: OS,
--[=[
@within Process
@read_only
@@ -100,7 +103,7 @@ export type Process = {
* `"x86_64"`
* `"aarch64"`
]=]
arch: "x86_64" | "aarch64",
arch: Arch,
--[=[
@within Process
@read_only
@@ -144,16 +147,12 @@ export type Process = {
The second argument, `params`, can be passed as a list of string parameters to give to the program.
The third argument, `options`, can be passed as a dictionary of options to give to the child process.
Refer to the documentation for `ProcessSpawnOptions` for specific option keys and their values.
Refer to the documentation for `SpawnOptions` for specific option keys and their values.
@param program The program to spawn as a child process
@param params Additional parameters to pass to the program
@param options A dictionary of options for the child process
@return A dictionary representing the result of the child process
]=]
spawn: (
program: string,
params: { string }?,
options: ProcessSpawnOptions?
) -> ProcessSpawnResult,
spawn: (program: string, params: { string }?, options: SpawnOptions?) -> SpawnResult,
}
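As a usage sketch for the renamed spawn types (the program and arguments are placeholders and assume a Unix-like system):

```luau
-- os and arch are now typed as OS and Arch
print(process.os, process.arch)

-- The options table is a SpawnOptions, the returned value a SpawnResult
local result = process.spawn("ls", { "-la" }, {
	shell = true,
	stdio = "default",
})
if not result.ok then
	error("Child process failed with code " .. tostring(result.code))
end
print(result.stdout)
```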


@@ -1,4 +1,4 @@
type SerdeEncodeDecodeFormat = "json" | "yaml" | "toml"
export type EncodeDecodeFormat = "json" | "yaml" | "toml"
--[=[
@class Serde
@@ -34,7 +34,7 @@ export type Serde = {
@param pretty If the encoded string should be human-readable, including things such as newlines and spaces. Only supported for json and toml formats, and defaults to false
@return The encoded string
]=]
encode: (format: SerdeEncodeDecodeFormat, value: any, pretty: boolean?) -> string,
encode: (format: EncodeDecodeFormat, value: any, pretty: boolean?) -> string,
--[=[
@within Serde
@must_use
@@ -45,5 +45,5 @@ export type Serde = {
@param encoded The string to decode
@return The decoded lua value
]=]
decode: (format: SerdeEncodeDecodeFormat, encoded: string) -> any,
decode: (format: EncodeDecodeFormat, encoded: string) -> any,
}
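A round-trip with the renamed `EncodeDecodeFormat` union, as a quick sketch:

```luau
-- Encode a table as pretty-printed JSON, then decode it back
local encoded = serde.encode("json", { message = "hi" }, true)
local decoded = serde.decode("json", encoded)
print(decoded.message) --> hi
```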


@@ -1,5 +1,14 @@
type StdioColor = "reset" | "black" | "red" | "green" | "yellow" | "blue" | "purple" | "cyan" | "white"
type StdioStyle = "reset" | "bold" | "dim"
export type Color =
"reset"
| "black"
| "red"
| "green"
| "yellow"
| "blue"
| "purple"
| "cyan"
| "white"
export type Style = "reset" | "bold" | "dim"
--[=[
@class Stdio
@@ -43,7 +52,7 @@ export type Stdio = {
@param color The color to use
@return A printable ANSI string
]=]
color: (color: StdioColor) -> string,
color: (color: Color) -> string,
--[=[
@within Stdio
@must_use
@@ -64,7 +73,7 @@ export type Stdio = {
@param style The style to use
@return A printable ANSI string
]=]
style: (style: StdioStyle) -> string,
style: (style: Style) -> string,
--[=[
@within Stdio
@must_use
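And a short sketch of the renamed `Color` and `Style` unions in use:

```luau
-- color and style return printable ANSI strings
stdio.write(stdio.color("green") .. stdio.style("bold"))
stdio.write("Success!\n")
stdio.write(stdio.color("reset") .. stdio.style("reset"))
```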


@@ -23,6 +23,7 @@ roblox = ["lune/roblox"]
lune = { path = "../lib" }
console.workspace = true
directories.workspace = true
futures-util.workspace = true
once_cell.workspace = true
reqwest.workspace = true


@@ -6,26 +6,18 @@ use clap::{CommandFactory, Parser};
use include_dir::{include_dir, Dir};
use lune::Lune;
use tokio::{
fs::{read as read_to_vec, write},
fs::read as read_to_vec,
io::{stdin, AsyncReadExt},
};
use crate::{
gen::{
generate_docs_json_from_definitions, generate_gitbook_dir_from_definitions,
generate_luau_defs_from_definitions, generate_selene_defs_from_definitions,
generate_typedefs_file_from_dir,
},
gen::{generate_gitbook_dir_from_definitions, generate_typedef_files_from_definitions},
utils::{
files::{discover_script_file_path_including_lune_dirs, strip_shebang},
listing::{find_lune_scripts, print_lune_scripts, sort_lune_scripts},
},
};
pub(crate) const FILE_NAME_SELENE_TYPES: &str = "lune.yml";
pub(crate) const FILE_NAME_LUAU_TYPES: &str = "luneTypes.d.luau";
pub(crate) const FILE_NAME_DOCS: &str = "luneDocs.json";
pub(crate) static TYPEDEFS_DIR: Dir<'_> = include_dir!("docs/typedefs");
/// A Luau script runner
@@ -40,14 +32,17 @@ pub struct Cli {
/// List scripts found inside of a nearby `lune` directory
#[clap(long, short = 'l')]
list: bool,
/// Generate a Luau type definitions file in the current dir
/// Set up type definitions and settings for development
#[clap(long)]
setup: bool,
/// Generate a Luau type definitions file in the current dir
#[clap(long, hide = true)]
generate_luau_types: bool,
/// Generate a Selene type definitions file in the current dir
#[clap(long)]
#[clap(long, hide = true)]
generate_selene_types: bool,
/// Generate a Lune documentation file for Luau LSP
#[clap(long)]
#[clap(long, hide = true)]
generate_docs_file: bool,
/// Generate the full Lune gitbook directory
#[clap(long, hide = true)]
@@ -76,18 +71,8 @@ impl Cli {
self
}
pub fn generate_selene_types(mut self) -> Self {
self.generate_selene_types = true;
self
}
pub fn generate_luau_types(mut self) -> Self {
self.generate_luau_types = true;
self
}
pub fn generate_docs_file(mut self) -> Self {
self.generate_docs_file = true;
pub fn setup(mut self) -> Self {
self.setup = true;
self
}
@@ -118,7 +103,8 @@ impl Cli {
}
}
// Generate (save) definition files, if wanted
let generate_file_requested = self.generate_luau_types
let generate_file_requested = self.setup
|| self.generate_luau_types
|| self.generate_selene_types
|| self.generate_docs_file
|| self.generate_gitbook_dir;
@@ -126,24 +112,12 @@
if self.generate_gitbook_dir {
generate_gitbook_dir_from_definitions(&TYPEDEFS_DIR).await?;
}
let definitions = generate_typedefs_file_from_dir(&TYPEDEFS_DIR);
if self.generate_luau_types {
generate_and_save_file(FILE_NAME_LUAU_TYPES, "Luau type definitions", || {
generate_luau_defs_from_definitions(&definitions)
})
.await?;
}
if self.generate_selene_types {
generate_and_save_file(FILE_NAME_SELENE_TYPES, "Selene type definitions", || {
generate_selene_defs_from_definitions(&definitions)
})
.await?;
}
if self.generate_docs_file {
generate_and_save_file(FILE_NAME_DOCS, "Luau LSP documentation", || {
generate_docs_json_from_definitions(&definitions, "roblox/global")
})
.await?;
if self.setup
|| self.generate_luau_types
|| self.generate_selene_types
|| self.generate_docs_file
{
generate_typedef_files_from_definitions(&TYPEDEFS_DIR).await?;
}
}
if self.script_path.is_none() {
@@ -191,34 +165,3 @@ impl Cli {
})
}
}
async fn generate_and_save_file(
file_path: &str,
display_name: &str,
f: impl Fn() -> Result<String>,
) -> Result<()> {
#[cfg(test)]
use crate::tests::fmt_path_relative_to_workspace_root;
match f() {
Ok(file_contents) => {
write(file_path, file_contents).await?;
#[cfg(not(test))]
println!("Generated {display_name} file at '{file_path}'");
#[cfg(test)]
println!(
"Generated {display_name} file at '{}'",
fmt_path_relative_to_workspace_root(file_path)
);
}
Err(e) => {
#[cfg(not(test))]
println!("Failed to generate {display_name} file at '{file_path}'\n{e}");
#[cfg(test)]
println!(
"Failed to generate {display_name} file at '{}'\n{e}",
fmt_path_relative_to_workspace_root(file_path)
);
}
}
Ok(())
}


@@ -1,4 +1,4 @@
use std::collections::{BTreeMap, HashMap};
use std::collections::{BTreeMap, HashMap, HashSet};
use anyhow::{Context, Result};
use full_moon::{
@@ -8,7 +8,6 @@ use full_moon::{
},
tokenizer::{TokenReference, TokenType},
};
use regex::Regex;
use super::{
builder::DefinitionsItemBuilder, item::DefinitionsItem, moonwave::parse_moonwave_style_comment,
@@ -27,7 +26,7 @@ pub struct DefinitionsParser {
found_top_level_items: BTreeMap<String, DefinitionsParserItem>,
found_top_level_types: HashMap<String, TypeInfo>,
found_top_level_comments: HashMap<String, Option<String>>,
found_top_level_declares: Vec<String>,
found_top_level_exports: Vec<String>,
}
impl DefinitionsParser {
@@ -36,7 +35,7 @@
found_top_level_items: BTreeMap::new(),
found_top_level_types: HashMap::new(),
found_top_level_comments: HashMap::new(),
found_top_level_declares: Vec::new(),
found_top_level_exports: Vec::new(),
}
}
@@ -50,36 +49,19 @@
where
S: AsRef<str>,
{
// TODO: Properly handle the "declare class" syntax, for now we just skip it
let mut no_class_declares = contents.as_ref().replace("\r\n", "\n");
while let Some(dec) = no_class_declares.find("\ndeclare class") {
let end = no_class_declares.find("\nend").unwrap();
let before = &no_class_declares[0..dec];
let after = &no_class_declares[end + 4..];
no_class_declares = format!("{before}{after}");
}
// Replace declares with export type syntax that can be parsed by full_moon,
// find all declare statements and save declared names for later parsing
let regex_declare = Regex::new(r#"declare (\w+): "#).unwrap();
let resulting_contents = regex_declare
.replace_all(&no_class_declares, "export type $1 =")
.to_string();
let found_declares = regex_declare
.captures_iter(&no_class_declares)
.map(|cap| cap[1].to_string())
.collect();
// Parse contents into top-level parser items for later use
let mut found_top_level_items = BTreeMap::new();
let mut found_top_level_types = HashMap::new();
let mut found_top_level_comments = HashMap::new();
let mut found_top_level_exports = HashSet::new();
let ast =
full_moon::parse(&resulting_contents).context("Failed to parse type definitions")?;
full_moon::parse(contents.as_ref()).context("Failed to parse type definitions")?;
for stmt in ast.nodes().stmts() {
if let Some((declaration, token_reference)) = match stmt {
if let Some((exported, declaration, token_reference)) = match stmt {
Stmt::ExportedTypeDeclaration(exp) => {
Some((exp.type_declaration(), exp.export_token()))
Some((true, exp.type_declaration(), exp.export_token()))
}
Stmt::TypeDeclaration(typ) => Some((typ, typ.type_token())),
Stmt::TypeDeclaration(typ) => Some((false, typ, typ.type_token())),
_ => None,
} {
let name = declaration.type_name().token().to_string();
@@ -93,14 +75,17 @@
},
);
found_top_level_types.insert(name.clone(), declaration.type_definition().clone());
found_top_level_comments.insert(name, comment);
found_top_level_comments.insert(name.clone(), comment);
if exported {
found_top_level_exports.insert(name);
}
}
}
// Store results
self.found_top_level_items = found_top_level_items;
self.found_top_level_types = found_top_level_types;
self.found_top_level_comments = found_top_level_comments;
self.found_top_level_declares = found_declares;
self.found_top_level_exports = found_top_level_exports.into_iter().collect();
Ok(())
}
@@ -113,7 +98,7 @@
.with_kind(kind.unwrap_or_else(|| item.type_info.parse_definitions_kind()))
.with_name(&item.name)
.with_type(item.type_info.to_string());
if self.found_top_level_declares.contains(&item.name) {
if self.found_top_level_exports.contains(&item.name) {
builder = builder.as_exported();
}
if let Some(comment) = item.comment {
@@ -157,7 +142,7 @@
self.found_top_level_items = BTreeMap::new();
self.found_top_level_types = HashMap::new();
self.found_top_level_comments = HashMap::new();
self.found_top_level_declares = Vec::new();
self.found_top_level_exports = Vec::new();
Ok(resulting_items)
}
}
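With the regex preprocessing gone, the parser now reads plain Luau type declarations directly and simply records which ones carry the `export` keyword. A sketch of the input shape it expects (names are illustrative):

```luau
-- Recorded in found_top_level_exports as well as the item maps
export type WriteOptions = {
	overwrite: boolean?,
}

-- Still parsed as a top-level item, but not marked as exported
type InternalAlias = string
```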


@@ -1,228 +0,0 @@
use anyhow::{Context, Result};
use serde_json::{Map as JsonMap, Value as JsonValue};
use super::definitions::{DefinitionsItem, DefinitionsItemTag, DefinitionsTree};
static KEY_DOCUMENTATION: &str = "documentation";
static KEY_KEYS: &str = "keys";
static KEY_NAME: &str = "name";
static KEY_CODE_SAMPLE: &str = "code_sample";
static KEY_LEARN_MORE_LINK: &str = "learn_more_link";
static VALUE_EMPTY: &str = "";
pub fn generate_from_type_definitions(contents: &str, namespace: &str) -> Result<String> {
let tree = DefinitionsTree::from_type_definitions(contents)?;
/*
Extract globals, functions, params, returns from the type definitions tree
Here we will also convert the plain names into proper namespaced names according to the spec at
https://raw.githubusercontent.com/MaximumADHD/Roblox-Client-Tracker/roblox/api-docs/en-us.json
*/
let mut map = JsonMap::new();
// Go over all the exported classes first (globals)
let exported_items = tree.children().iter().filter(|item| {
item.is_exported()
&& (item.is_function()
|| item.children().iter().any(|item_child| {
item_child.is_tag() && item_child.get_name().unwrap() == "class"
}))
});
for item in exported_items {
parse_and_insert(&mut map, item, namespace, None)?;
}
// Go over the rest, these will be global types
// that exported items are referencing somewhere
serde_json::to_string_pretty(&JsonValue::Object(map)).context("Failed to encode docs as json")
}
#[allow(clippy::too_many_lines)]
fn parse_and_insert(
map: &mut JsonMap<String, JsonValue>,
item: &DefinitionsItem,
namespace: &str,
parent: Option<&DefinitionsItem>,
) -> Result<()> {
let mut item_map = JsonMap::new();
let item_name = item
.get_name()
.with_context(|| format!("Missing name for doc item: {item:#?}"))?;
// Include parent name in full name, unless there is no parent (top-level global)
let item_name_full = match parent {
Some(parent) => format!(
"{}.{item_name}",
parent
.get_name()
.with_context(|| format!("Missing parent name for doc item: {item:#?}"))?
),
None => item_name.to_string(),
};
// Try to parse params & returns to use later
let mut params = Vec::new();
let mut returns = Vec::new();
if item.is_function() {
// Map and separate found tags into params & returns
let mut tags = item
.children()
.iter()
.filter_map(|child| {
if let Ok(tag) = DefinitionsItemTag::try_from(child) {
Some(tag)
} else {
None
}
})
.collect::<Vec<_>>();
for tag in tags.drain(..) {
if tag.is_param() {
params.push(tag);
} else if tag.is_return() {
returns.push(tag);
}
}
}
// Try to parse the description for this typedef item, if it has one,
// insert description + code sample + learn more link if they exist
if let Some(description) = item.children().iter().find(|child| child.is_description()) {
let (description, code_sample, learn_more_link) = try_parse_description_for_docs(
description
.get_value()
.context("Missing description value for doc item")?
.to_string(),
);
item_map.insert(
KEY_DOCUMENTATION.to_string(),
JsonValue::String(description),
);
if let Some(code_sample) = code_sample {
item_map.insert(KEY_CODE_SAMPLE.to_string(), JsonValue::String(code_sample));
} else {
item_map.insert(
KEY_CODE_SAMPLE.to_string(),
JsonValue::String(VALUE_EMPTY.to_string()),
);
}
if let Some(learn_more_link) = learn_more_link {
item_map.insert(
KEY_LEARN_MORE_LINK.to_string(),
JsonValue::String(learn_more_link),
);
} else {
item_map.insert(
KEY_LEARN_MORE_LINK.to_string(),
JsonValue::String(VALUE_EMPTY.to_string()),
);
}
}
/*
If the typedef item is a table, we should include keys
which are references from this global to its members,
then we should parse its members and add them in
If it is a function, we should parse its params and args,
make links to them in this object, and then add them in as
separate items into the globals map, with their documentation
*/
if item.is_table() {
let mut keys = item
.children()
.iter()
.filter_map(|child| {
if child.is_property() || child.is_table() || child.is_function() {
Some(child.get_name().expect("Missing name for doc item child"))
} else {
None
}
})
.collect::<Vec<_>>();
if keys.is_empty() {
item_map.insert(KEY_KEYS.to_string(), JsonValue::Object(JsonMap::new()));
} else {
let mut keys_map = JsonMap::new();
for key in keys.drain(..) {
keys_map.insert(
key.to_string(),
JsonValue::String(format!("@{namespace}/{item_name_full}.{key}")),
);
}
item_map.insert(KEY_KEYS.to_string(), JsonValue::Object(keys_map));
}
} else if item.is_function() {
// Add links to params
if params.is_empty() {
item_map.insert("params".to_string(), JsonValue::Array(vec![]));
} else {
let mut params_vec = Vec::new();
for (index, param) in params.iter().enumerate() {
let mut param_map = JsonMap::new();
if let DefinitionsItemTag::Param((name, _)) = param {
param_map.insert(KEY_NAME.to_string(), JsonValue::String(name.to_string()));
param_map.insert(
KEY_DOCUMENTATION.to_string(),
JsonValue::String(format!("@{namespace}/{item_name_full}/param/{index}")),
);
}
params_vec.push(JsonValue::Object(param_map));
}
item_map.insert("params".to_string(), JsonValue::Array(params_vec));
}
// Add links to returns
if returns.is_empty() {
item_map.insert("returns".to_string(), JsonValue::Array(vec![]));
} else {
let mut returns_vec = Vec::new();
for (index, _) in returns.iter().enumerate() {
returns_vec.push(JsonValue::String(format!(
"@{namespace}/{item_name_full}/return/{index}"
)));
}
item_map.insert("returns".to_string(), JsonValue::Array(returns_vec));
}
}
map.insert(
format!("@{namespace}/{item_name_full}"),
JsonValue::Object(item_map),
);
if item.is_table() {
for child in item
.children()
.iter()
.filter(|child| !child.is_description() && !child.is_tag())
{
parse_and_insert(map, child, namespace, Some(item))?;
}
} else if item.is_function() {
// FIXME: It seems the order of params and returns here is not
// deterministic, they can be unordered which leads to confusing docs
for (index, param) in params.iter().enumerate() {
let mut param_map = JsonMap::new();
if let DefinitionsItemTag::Param((_, doc)) = param {
param_map.insert(
KEY_DOCUMENTATION.to_string(),
JsonValue::String(format!("{doc}\n\n---\n")),
);
}
map.insert(
format!("@{namespace}/{item_name_full}/param/{index}"),
JsonValue::Object(param_map),
);
}
for (index, ret) in returns.iter().enumerate() {
let mut return_map = JsonMap::new();
if let DefinitionsItemTag::Return(doc) = ret {
return_map.insert(
KEY_DOCUMENTATION.to_string(),
JsonValue::String(doc.to_string()),
);
}
map.insert(
format!("@{namespace}/{item_name_full}/return/{index}"),
JsonValue::Object(return_map),
);
}
}
Ok(())
}
fn try_parse_description_for_docs(description: String) -> (String, Option<String>, Option<String>) {
// TODO: Implement this
(description, None, None)
}


@@ -11,15 +11,11 @@ use super::definitions::{
};
const GENERATED_COMMENT_TAG: &str = "<!-- @generated with lune-cli -->";
const CATEGORY_NONE_NAME: &str = "Uncategorized";
const CATEGORY_NONE_DESC: &str = "
All globals that are not available under a specific scope.
These are to be used directly without indexing a global table first.
";
#[allow(clippy::too_many_lines)]
pub async fn generate_from_type_definitions(contents: HashMap<String, String>) -> Result<()> {
pub async fn generate_from_type_definitions(
definitions: HashMap<String, DefinitionsTree>,
) -> Result<()> {
let mut dirs_to_write = Vec::new();
let mut files_to_write = Vec::new();
// Create the gitbook dir at the repo root
@@ -35,64 +31,47 @@ pub async fn generate_from_type_definitions(contents: HashMap<String, String>) -
dirs_to_write.push(path_gitbook_docs_dir.clone());
dirs_to_write.push(path_gitbook_pages_dir.clone());
dirs_to_write.push(path_gitbook_api_dir.clone());
// Sort doc items into subcategories based on globals
let mut api_reference = HashMap::new();
let mut without_main_item = Vec::new();
for (typedef_name, typedef_contents) in contents {
let tree = DefinitionsTree::from_type_definitions(typedef_contents)?;
let main = tree.children().iter().find(
// Convert definition trees into single root items so that we can parse and write markdown recursively
let mut typedef_items = HashMap::new();
for (typedef_name, typedef_contents) in definitions {
let main = typedef_contents
.children()
.iter()
.find(
|c| matches!(c.get_name(), Some(s) if s.to_lowercase() == typedef_name.to_lowercase()),
);
if let Some(main) = main {
let children = tree
.children()
.iter()
.filter_map(|child| {
if child == main {
None
} else {
Some(
DefinitionsItemBuilder::from(child)
.with_kind(DefinitionsItemKind::Type)
.build()
.unwrap(),
)
}
})
.collect::<Vec<_>>();
let root = DefinitionsItemBuilder::new()
.with_kind(main.kind())
.with_name(main.get_name().unwrap())
.with_children(main.children())
.with_children(&children);
api_reference.insert(
typedef_name.clone(),
root.build().expect("Failed to build root definitions item"),
);
} else {
for top_level_item in tree.children() {
without_main_item.push(top_level_item.clone());
}
}
)
.expect("Failed to find main export for generating typedef file");
let children = typedef_contents
.children()
.iter()
.filter_map(|child| {
if child == main {
None
} else {
Some(
DefinitionsItemBuilder::from(child)
.with_kind(DefinitionsItemKind::Type)
.build()
.unwrap(),
)
}
})
.collect::<Vec<_>>();
let root = DefinitionsItemBuilder::new()
.with_kind(main.kind())
.with_name(main.get_name().unwrap())
.with_children(main.children())
.with_children(&children);
let root_item = root.build().expect("Failed to build root definitions item");
typedef_items.insert(typedef_name.to_string(), root_item);
}
// Insert globals with no category into a new "Uncategorized" global
api_reference.insert(
CATEGORY_NONE_NAME.to_string(),
DefinitionsItemBuilder::new()
.with_kind(DefinitionsItemKind::Table)
.with_name("Uncategorized")
.with_children(&without_main_item)
.with_child(
DefinitionsItemBuilder::new()
.with_kind(DefinitionsItemKind::Description)
.with_value(CATEGORY_NONE_DESC)
.build()?,
)
.build()
.unwrap(),
);
// Generate files for all subcategories
for (category_name, category_item) in api_reference {
for (category_name, category_item) in typedef_items {
let path = path_gitbook_api_dir
.join(category_name.to_ascii_lowercase())
.with_extension("md");


@@ -1,10 +0,0 @@
use anyhow::Result;
#[allow(clippy::unnecessary_wraps)]
pub fn generate_from_type_definitions(contents: &str) -> Result<String> {
Ok(format!(
"--> Lune v{}\n\n{}",
env!("CARGO_PKG_VERSION"),
contents
))
}


@@ -2,81 +2,37 @@ use std::collections::HashMap;
use anyhow::Result;
use include_dir::Dir;
use regex::Regex;
mod docs_file;
use self::definitions::DefinitionsTree;
mod gitbook_dir;
mod luau_defs;
mod selene_defs;
mod typedef_files;
pub mod definitions;
pub use docs_file::generate_from_type_definitions as generate_docs_json_from_definitions;
pub use luau_defs::generate_from_type_definitions as generate_luau_defs_from_definitions;
pub use selene_defs::generate_from_type_definitions as generate_selene_defs_from_definitions;
pub async fn generate_gitbook_dir_from_definitions(dir: &Dir<'_>) -> Result<()> {
let mut result = HashMap::new();
let definitions = read_typedefs_dir(dir)?;
gitbook_dir::generate_from_type_definitions(definitions).await
}
pub async fn generate_typedef_files_from_definitions(dir: &Dir<'_>) -> Result<()> {
let definitions = read_typedefs_dir(dir)?;
typedef_files::generate_from_type_definitions(definitions).await
}
fn read_typedefs_dir(dir: &Dir<'_>) -> Result<HashMap<String, DefinitionsTree>> {
let mut definitions = HashMap::new();
for entry in dir.find("*.luau").unwrap() {
let entry_file = entry.as_file().unwrap();
let entry_name = entry_file.path().file_name().unwrap().to_string_lossy();
let typedef_name = entry_name.trim_end_matches(".luau");
let typedef_contents = entry_file
.contents_utf8()
.unwrap()
.to_string()
.replace(
&format!("export type {typedef_name} = "),
&format!("declare {}: ", typedef_name.to_ascii_lowercase()),
)
.replace("export type ", "type ");
let typedef_contents = entry_file.contents_utf8().unwrap().to_string();
result.insert(typedef_name.to_string(), typedef_contents);
let typedef_tree = DefinitionsTree::from_type_definitions(&typedef_contents)?;
definitions.insert(typedef_name.to_string(), typedef_tree);
}
match gitbook_dir::generate_from_type_definitions(result).await {
Ok(_) => Ok(()),
Err(e) => Err(e),
}
}
pub fn generate_typedefs_file_from_dir(dir: &Dir<'_>) -> String {
let mut result = String::new();
for entry in dir.find("*.luau").unwrap() {
let entry_file = entry.as_file().unwrap();
let entry_name = entry_file.path().file_name().unwrap().to_string_lossy();
if entry_name.contains("Globals") {
continue;
}
let typedef_name = entry_name.trim_end_matches(".luau");
let typedef_contents = entry_file.contents_utf8().unwrap().to_string().replace(
&format!("export type {typedef_name} = "),
&format!("declare {}: ", typedef_name.to_ascii_lowercase()),
);
if !result.is_empty() {
result.push_str(&"\n".repeat(10));
}
result.push_str(&typedef_contents);
}
let globals_contents = dir
.get_file("Globals.luau")
.unwrap()
.contents_utf8()
.unwrap();
let regex_export_to_declare = Regex::new(r#"export type (\w+) = "#).unwrap();
let regexed_globals = regex_export_to_declare.replace_all(globals_contents, "declare $1: ");
result.push_str(&"\n".repeat(10));
result.push_str(&regexed_globals);
result
Ok(definitions)
}


@@ -1,176 +0,0 @@
use anyhow::{Context, Result};
use full_moon::tokenizer::Symbol;
use serde_yaml::{Mapping as YamlMapping, Sequence as YamlSequence, Value as YamlValue};
use crate::gen::definitions::DefinitionsItemTag;
use super::definitions::{DefinitionsItem, DefinitionsItemKind, DefinitionsTree};
const USE_TYPE_UNIONS: bool = false;
pub fn generate_from_type_definitions(contents: &str) -> Result<String> {
let tree = DefinitionsTree::from_type_definitions(contents)?;
let mut globals = YamlMapping::new();
let top_level_exported_items = tree.children().iter().filter(|top_level| {
top_level.is_exported()
&& (top_level.is_function()
|| top_level.children().iter().any(|top_level_child| {
top_level_child.is_tag() && top_level_child.get_name().unwrap() == "class"
}))
});
for top_level_item in top_level_exported_items {
match top_level_item.kind() {
DefinitionsItemKind::Table => {
let top_level_name = top_level_item
.get_name()
.context("Missing name for top-level doc item")?
.to_string();
for child_item in top_level_item
.children()
.iter()
.filter(|item| item.is_function() || item.is_table() || item.is_property())
{
let child_name = child_item
.get_name()
.context("Missing name for top-level child doc item")?
.to_string();
globals.insert(
YamlValue::String(format!("{top_level_name}.{child_name}")),
YamlValue::Mapping(doc_item_to_selene_yaml_mapping(child_item)?),
);
}
}
DefinitionsItemKind::Function => {
globals.insert(
YamlValue::String(
top_level_item
.get_name()
.context("Missing name for top-level doc item")?
.to_string(),
),
YamlValue::Mapping(doc_item_to_selene_yaml_mapping(top_level_item)?),
);
}
_ => unimplemented!("Globals other than tables and functions are not yet implemented"),
}
}
let mut contents = YamlMapping::new();
contents.insert(
YamlValue::String("globals".to_string()),
YamlValue::Mapping(globals),
);
Ok(format!(
"# Lune v{}\n---\n{}",
env!("CARGO_PKG_VERSION"),
serde_yaml::to_string(&contents).context("Failed to encode type definitions as yaml")?
))
}
fn doc_item_to_selene_yaml_mapping(item: &DefinitionsItem) -> Result<YamlMapping> {
let mut mapping = YamlMapping::new();
if item.is_property() || item.is_table() {
let property_access_tag = item
.children()
.iter()
.find_map(|child| {
if let Ok(tag) = DefinitionsItemTag::try_from(child) {
if tag.is_read_only() || tag.is_read_write() {
Some(tag)
} else {
None
}
} else {
None
}
})
.with_context(|| {
format!(
"Missing property access tag for doc item:\n{}",
item.get_name().unwrap()
)
})?;
mapping.insert(
YamlValue::String("property".to_string()),
YamlValue::String(
match property_access_tag {
DefinitionsItemTag::ReadOnly => "read-only",
DefinitionsItemTag::ReadWrite => "new-fields",
_ => unreachable!(),
}
.to_string(),
),
);
} else if item.is_function() {
let is_must_use = item.children().iter().any(|child| {
if let Ok(tag) = DefinitionsItemTag::try_from(child) {
tag.is_must_use()
} else {
false
}
});
if is_must_use {
mapping.insert(
YamlValue::String("must_use".to_string()),
YamlValue::Bool(true),
);
}
let mut args = YamlSequence::new();
for arg in item.args() {
let arg_type: &str = arg.typedef_simple.as_ref();
let mut arg_mapping = YamlMapping::new();
let (type_str, mut type_opt) = match arg_type.strip_suffix('?') {
Some(stripped) => (stripped, true),
None => (arg_type, false),
};
let simplified = simplify_type_str_into_primitives(
type_str.trim_start_matches('(').trim_end_matches(')'),
);
if simplified.contains("...") {
type_opt = true;
}
if type_opt {
arg_mapping.insert(
YamlValue::String("required".to_string()),
YamlValue::Bool(false),
);
}
arg_mapping.insert(
YamlValue::String("type".to_string()),
YamlValue::String(simplified),
);
args.push(YamlValue::Mapping(arg_mapping));
}
mapping.insert(
YamlValue::String("args".to_string()),
YamlValue::Sequence(args),
);
}
Ok(mapping)
}
fn simplify_type_str_into_primitives(type_str: &str) -> String {
let separator = format!(" {} ", Symbol::Pipe);
// Simplify type strings even further into ones that selene can understand,
// turning types such as `{ bool }` or `"string-literal"` into `table` and `string`
let mut primitives = Vec::new();
for type_inner in type_str.split(&separator) {
if type_inner.starts_with('{') && type_inner.ends_with('}') {
primitives.push("table".to_string());
} else if type_inner.starts_with('"') && type_inner.ends_with('"') {
primitives.push("string".to_string());
} else if type_inner == "boolean" {
primitives.push("bool".to_string());
} else if type_inner == "thread" {
primitives.push("any".to_string());
} else {
primitives.push(type_inner.to_string());
}
}
if primitives.len() > 1 && !USE_TYPE_UNIONS {
"any".to_string()
} else {
primitives.sort_unstable();
primitives.dedup();
primitives.join(&separator)
}
}


@@ -0,0 +1,159 @@
use std::{collections::HashMap, fmt::Write};
use anyhow::{Context, Result};
use directories::UserDirs;
use futures_util::future::try_join_all;
use tokio::fs::{create_dir_all, write};
use super::definitions::{DefinitionsItem, DefinitionsTree};
const GENERATED_COMMENT_TAG: &str = "--!strict";
#[allow(clippy::too_many_lines)]
pub async fn generate_from_type_definitions(
api_reference: HashMap<String, DefinitionsTree>,
) -> Result<()> {
let mut dirs_to_write = Vec::new();
let mut files_to_write = Vec::new();
// Create the typedefs dir in the user's home directory, at ~/.lune/typedefs/<version>
let cache_dir = UserDirs::new()
.context("Failed to find user home directory")?
.home_dir()
.join(".lune")
.join("typedefs")
.join(env!("CARGO_PKG_VERSION"));
dirs_to_write.push(cache_dir.clone());
// Make typedef files
for (category_name, category_tree) in api_reference {
let path = cache_dir
.join(category_name.to_ascii_lowercase())
.with_extension("luau");
let mut contents = String::new();
write!(
contents,
"{GENERATED_COMMENT_TAG}\n-- @lune/{} {}\n",
category_name.to_lowercase(),
env!("CARGO_PKG_VERSION")
)?;
write_tree(&mut contents, category_name, category_tree)?;
files_to_write.push((path, contents));
}
// Write all dirs and files only when we know generation was successful
let futs_dirs = dirs_to_write
.drain(..)
.map(create_dir_all)
.collect::<Vec<_>>();
let futs_files = files_to_write
.drain(..)
.map(|(path, contents)| write(path, contents))
.collect::<Vec<_>>();
try_join_all(futs_dirs).await?;
try_join_all(futs_files).await?;
Ok(())
}
fn make_return_table_item(item: &DefinitionsItem) -> Result<String> {
let mut description = String::new();
if let Some(desc) = item.children().iter().find(|child| child.is_description()) {
write!(description, "\n{}\n", desc.get_value().unwrap().trim())?;
for tag in item.children().iter().filter(|child| child.is_tag()) {
let tag_name = tag.get_name().unwrap();
if tag_name == "param" {
write!(
description,
"\n@param {} {}",
tag.get_meta().unwrap(),
tag.get_value().unwrap()
)?;
} else if tag_name == "return" {
write!(description, "\n@return {}", tag.get_value().unwrap())?;
}
}
}
let mut contents = String::new();
if item.is_function() {
let args = item
.args()
.iter()
.map(|arg| format!("{}: {}", arg.name.trim(), arg.typedef.trim()))
.collect::<Vec<_>>()
.join(", ");
write!(contents, "function ({args})")?;
write!(contents, "\n\treturn nil :: any")?;
write!(contents, "\nend,")?;
} else if item.is_property() {
write!(contents, "(nil :: any) :: {},", item.get_type().unwrap())?;
}
Ok(format!(
"\n--[=[{}\n]=]\n{} = {}",
description.trim_end().replace('\n', "\n\t"),
item.get_name().unwrap_or("_"),
contents
))
}
fn write_tree(contents: &mut String, name: String, root: DefinitionsTree) -> Result<()> {
let main = root
.children()
.iter()
.find(|c| matches!(c.get_name(), Some(s) if s.to_lowercase() == name.to_lowercase()))
.expect("Failed to find main export for generating typedef file");
let mut description = String::new();
if let Some(desc) = main.children().iter().find(|child| child.is_description()) {
write!(description, "\n{}\n", desc.get_value().unwrap().trim())?;
}
let children = root
.children()
.iter()
.filter(|child| child != &main)
.collect::<Vec<_>>();
for child in children {
if child.is_type() || child.is_table() || child.is_function() || child.is_property() {
let mut child_description = String::new();
if let Some(desc) = child.children().iter().find(|child| child.is_description()) {
write!(
child_description,
"\n{}\n",
desc.get_value().unwrap().trim()
)?;
write!(
contents,
"\n--[=[{}\n]=]",
child_description.trim_end().replace('\n', "\n\t"),
)?;
}
if child.is_exported() {
write!(contents, "\nexport ")?;
}
writeln!(
contents,
"type {} = {}",
child.get_name().unwrap(),
child.get_type().unwrap()
)?;
}
}
let mut ret_table = String::new();
for child in main
.children()
.iter()
.filter(|child| child.is_function() || child.is_property())
{
write!(ret_table, "{}", make_return_table_item(child)?)?;
}
write!(
contents,
"\n--[=[{}\n]=]\nreturn {{\n{}\n}}\n",
description.trim_end().replace('\n', "\n\t"),
ret_table.trim_end().replace('\n', "\n\t")
)?;
Ok(())
}
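Putting `write_tree` and `make_return_table_item` together, a generated file under `~/.lune/typedefs/<version>/` should come out roughly in this shape (a hand-written sketch; the names, comments, and version are illustrative):

```luau
--!strict
-- @lune/fs 0.7.0

--[=[
	Options for filesystem APIs that write to files and/or directories.
]=]
export type WriteOptions = {
	overwrite: boolean?,
}

--[=[
	Filesystem APIs.
]=]
return {
	--[=[
		Moves a file or directory to a new path.

		@param from The path to move from
		@param to The path to move to
	]=]
	move = function (from: string, to: string, overwriteOrOptions: (boolean | WriteOptions)?)
		return nil :: any
	end,
}
```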


@@ -1,14 +1,12 @@
use anyhow::Result;
use crate::cli::{Cli, FILE_NAME_DOCS, FILE_NAME_LUAU_TYPES, FILE_NAME_SELENE_TYPES};
use crate::cli::Cli;
mod bin_dir;
mod file_checks;
mod file_type;
mod run_cli;
pub(crate) use file_checks::*;
pub(crate) use file_type::*;
pub(crate) use run_cli::*;
#[tokio::test]
@@ -18,22 +16,8 @@ async fn list() -> Result<()> {
}
#[tokio::test]
async fn generate_selene_types() -> Result<()> {
run_cli(Cli::new().generate_selene_types()).await?;
ensure_file_exists_and_is(FILE_NAME_SELENE_TYPES, FileType::Yaml).await?;
Ok(())
}
#[tokio::test]
async fn generate_luau_types() -> Result<()> {
run_cli(Cli::new().generate_luau_types()).await?;
ensure_file_exists_and_is(FILE_NAME_LUAU_TYPES, FileType::Luau).await?;
Ok(())
}
#[tokio::test]
async fn generate_docs_file() -> Result<()> {
run_cli(Cli::new().generate_docs_file()).await?;
ensure_file_exists_and_is(FILE_NAME_DOCS, FileType::Json).await?;
async fn generate_typedef_files() -> Result<()> {
run_cli(Cli::new().setup()).await?;
// TODO: Implement test
Ok(())
}


@@ -23,6 +23,7 @@ lune-roblox = { path = "../lib-roblox", optional = true }
rbx_cookie = { version = "0.1.2", optional = true }
console.workspace = true
directories.workspace = true
futures-util.workspace = true
mlua.workspace = true
once_cell.workspace = true
@@ -36,7 +37,6 @@ tokio.workspace = true
async-trait = "0.1"
blocking = "1.3"
dialoguer = "0.10"
directories = "4.0"
dunce = "1.0"
pin-project = "1.0"
os_str_bytes = "6.4"