mirror of https://github.com/lune-org/lune.git
synced 2024-12-12 13:00:37 +00:00

Improve docs generation, add function types

This commit is contained in:
parent 0d06e096c5
commit 904ffcd212

9 changed files with 195 additions and 82 deletions

@@ -163,7 +163,7 @@ export type NetWebSocket = {
 local response = net.request({
 	url = "https://dummyjson.com/products/add",
 	method = "POST",
 	headers = { ["Content-Type"] = "application/json" },
 	body = net.jsonEncode({
 		title = "Cool Pencil",
 	})

@@ -1,6 +1,9 @@
 use anyhow::{bail, Result};
 
-use super::{item::DefinitionsItem, kind::DefinitionsItemKind};
+use super::{
+    item::{DefinitionsItem, DefinitionsItemFunctionArg, DefinitionsItemFunctionRet},
+    kind::DefinitionsItemKind,
+};
 
 #[derive(Debug, Default, Clone)]
 pub struct DefinitionsItemBuilder {

@@ -10,7 +13,8 @@ pub struct DefinitionsItemBuilder {
     meta: Option<String>,
     value: Option<String>,
     children: Vec<DefinitionsItem>,
-    arg_types: Vec<String>,
+    args: Vec<DefinitionsItemFunctionArg>,
+    rets: Vec<DefinitionsItemFunctionRet>,
 }
 
 #[allow(dead_code)]

@@ -57,14 +61,26 @@ impl DefinitionsItemBuilder {
         self
     }
 
-    pub fn with_arg_type<S: AsRef<str>>(mut self, arg_type: S) -> Self {
-        self.arg_types.push(arg_type.as_ref().to_string());
+    pub fn with_arg(mut self, arg: DefinitionsItemFunctionArg) -> Self {
+        self.args.push(arg);
         self
     }
 
-    pub fn with_arg_types<S: AsRef<str>>(mut self, arg_types: &[S]) -> Self {
-        for arg_type in arg_types {
-            self.arg_types.push(arg_type.as_ref().to_string());
+    pub fn with_args(mut self, args: &[DefinitionsItemFunctionArg]) -> Self {
+        for arg in args {
+            self.args.push(arg.clone());
+        }
+        self
+    }
+
+    pub fn with_ret(mut self, ret: DefinitionsItemFunctionRet) -> Self {
+        self.rets.push(ret);
+        self
+    }
+
+    pub fn with_rets(mut self, rets: &[DefinitionsItemFunctionRet]) -> Self {
+        for ret in rets {
+            self.rets.push(ret.clone());
         }
         self
     }
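
The builder gains matching singular and plural adders for args and rets. As a side note, the shape of that "with_x / with_xs" pattern can be sketched in a small self-contained example; the Arg and Builder types below are stand-ins for illustration, not the crate's actual definitions:

// Minimal sketch of the singular/plural builder-adder pattern used above.
// `Arg` and `Builder` are hypothetical stand-ins, not the real crate types.
#[derive(Debug, Clone)]
struct Arg {
    name: String,
    typedef: String,
}

#[derive(Debug, Default)]
struct Builder {
    args: Vec<Arg>,
}

impl Builder {
    fn with_arg(mut self, arg: Arg) -> Self {
        self.args.push(arg);
        self
    }

    fn with_args(mut self, args: &[Arg]) -> Self {
        for arg in args {
            self.args.push(arg.clone());
        }
        self
    }
}

fn main() {
    let built = Builder::default()
        .with_arg(Arg { name: "path".into(), typedef: "string".into() })
        .with_args(&[Arg { name: "options".into(), typedef: "table?".into() }]);
    println!("{:?}", built.args);
}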

@@ -80,7 +96,8 @@ impl DefinitionsItemBuilder {
                 meta: self.meta,
                 value: self.value,
                 children,
-                arg_types: self.arg_types,
+                args: self.args,
+                rets: self.rets,
             })
         } else {
             bail!("Missing doc item kind")

@@ -4,6 +4,47 @@ use serde::{Deserialize, Serialize};
 
 use super::kind::DefinitionsItemKind;
 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct DefinitionsItemFunctionArg {
+    pub name: String,
+    pub typedef: String,
+    pub typedef_simple: String,
+}
+
+impl DefinitionsItemFunctionArg {
+    pub fn new<N, T, TS>(name: N, typedef: T, typedef_simple: TS) -> Self
+    where
+        N: Into<String>,
+        T: Into<String>,
+        TS: Into<String>,
+    {
+        Self {
+            name: name.into(),
+            typedef: typedef.into(),
+            typedef_simple: typedef_simple.into(),
+        }
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct DefinitionsItemFunctionRet {
+    pub typedef: String,
+    pub typedef_simple: String,
+}
+
+impl DefinitionsItemFunctionRet {
+    pub fn new<T, TS>(typedef: T, typedef_simple: TS) -> Self
+    where
+        T: Into<String>,
+        TS: Into<String>,
+    {
+        Self {
+            typedef: typedef.into(),
+            typedef_simple: typedef_simple.into(),
+        }
+    }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct DefinitionsItem {

@@ -19,7 +60,9 @@ pub struct DefinitionsItem {
     #[serde(skip_serializing_if = "Vec::is_empty")]
     pub(super) children: Vec<DefinitionsItem>,
     #[serde(skip_serializing_if = "Vec::is_empty")]
-    pub(super) arg_types: Vec<String>,
+    pub(super) args: Vec<DefinitionsItemFunctionArg>,
+    #[serde(skip_serializing_if = "Vec::is_empty")]
+    pub(super) rets: Vec<DefinitionsItemFunctionRet>,
 }
 
 #[allow(clippy::trivially_copy_pass_by_ref)]

@@ -108,7 +151,11 @@ impl DefinitionsItem {
         &self.children
     }
 
-    pub fn arg_types(&self) -> Vec<&str> {
-        self.arg_types.iter().map(String::as_str).collect()
+    pub fn args(&self) -> Vec<&DefinitionsItemFunctionArg> {
+        self.args.iter().collect()
+    }
+
+    pub fn rets(&self) -> Vec<&DefinitionsItemFunctionRet> {
+        self.rets.iter().collect()
     }
 }

@@ -39,26 +39,30 @@ fn parse_moonwave_style_tag(line: &str) -> Option<DefinitionsItem> {
 }
 
 pub(super) fn parse_moonwave_style_comment(comment: &str) -> Vec<DefinitionsItem> {
-    let lines = comment.lines().map(str::trim).collect::<Vec<_>>();
-    let indent_len = lines.iter().fold(usize::MAX, |acc, line| {
-        let first = line.chars().enumerate().find_map(|(idx, ch)| {
-            if ch.is_alphanumeric() {
-                Some(idx)
+    let no_tabs = comment.replace('\t', " ");
+    let lines = no_tabs.split('\n').collect::<Vec<_>>();
+    let indent_len =
+        lines.iter().fold(usize::MAX, |acc, line| {
+            let first = line.chars().enumerate().find_map(|(idx, ch)| {
+                if ch.is_whitespace() {
+                    None
+                } else {
+                    Some(idx)
+                }
+            });
+            if let Some(first_non_whitespace) = first {
+                acc.min(first_non_whitespace)
             } else {
-                None
+                acc
             }
         });
-        if let Some(first_alphanumeric) = first {
-            if first_alphanumeric > 0 {
-                acc.min(first_alphanumeric - 1)
-            } else {
-                0
-            }
+    let unindented_lines = lines.iter().map(|line| {
+        if line.chars().any(|c| !c.is_whitespace()) {
+            &line[indent_len..]
         } else {
-            acc
+            line
        }
     });
-    let unindented_lines = lines.iter().map(|line| &line[indent_len..]);
     let mut doc_items = Vec::new();
     let mut doc_lines = Vec::new();
     for line in unindented_lines {
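
The shared indent is now measured from the first non-whitespace character of each line rather than the first alphanumeric one, and only non-blank lines are unindented. A self-contained sketch of that approach (not the crate's code; input text is hypothetical, and byte/char offsets coincide for the ASCII input used here):

// Sketch of the new unindentation strategy: find the smallest offset of a
// non-whitespace character across lines, then strip that prefix from every
// non-blank line while leaving blank lines untouched.
fn unindent(comment: &str) -> Vec<String> {
    let no_tabs = comment.replace('\t', " ");
    let lines = no_tabs.split('\n').collect::<Vec<_>>();
    let indent_len = lines.iter().fold(usize::MAX, |acc, line| {
        let first = line
            .chars()
            .enumerate()
            .find_map(|(idx, ch)| if ch.is_whitespace() { None } else { Some(idx) });
        match first {
            Some(first_non_whitespace) => acc.min(first_non_whitespace),
            None => acc,
        }
    });
    lines
        .into_iter()
        .map(|line| {
            if line.chars().any(|c| !c.is_whitespace()) {
                line[indent_len..].to_string()
            } else {
                line.to_string()
            }
        })
        .collect()
}

fn main() {
    // Hypothetical moonwave-style comment body with a punctuation-led line
    let comment = "    ### Example usage\n\n    local x = 1";
    for line in unindent(comment) {
        println!("{line}");
    }
}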

@@ -72,7 +76,7 @@ pub(super) fn parse_moonwave_style_comment(comment: &str) -> Vec<DefinitionsItem
             doc_items.push(
                 DefinitionsItemBuilder::new()
                     .with_kind(DefinitionsItemKind::Description)
-                    .with_value(doc_lines.join("\n").trim())
+                    .with_value(doc_lines.join("\n"))
                     .build()
                     .unwrap(),
             );

@@ -112,7 +112,7 @@ impl DefinitionsParser {
                 .type_info
                 .extract_args_normalized(&self.found_top_level_types)
             {
-                builder = builder.with_arg_types(&args);
+                builder = builder.with_args(&args);
             }
             if let TypeInfo::Table { fields, .. } = item.type_info {
                 for field in fields.iter() {

@@ -156,5 +156,5 @@ fn find_token_moonwave_comment(token: &TokenReference) -> Option<String> {
             _ => None,
         })
         .last()
-        .map(|comment| comment.trim().to_string())
+        .map(ToString::to_string)
 }

@@ -6,7 +6,10 @@ use full_moon::{
     ShortString,
 };
 
-use super::kind::DefinitionsItemKind;
+use super::{
+    item::{DefinitionsItemFunctionArg, DefinitionsItemFunctionRet},
+    kind::DefinitionsItemKind,
+};
 
 pub(crate) trait TypeInfoExt {
     fn is_fn(&self) -> bool;

@@ -20,7 +23,12 @@ pub(crate) trait TypeInfoExt {
     fn extract_args_normalized(
         &self,
         type_lookup_table: &HashMap<String, TypeInfo>,
-    ) -> Option<Vec<String>>;
+    ) -> Option<Vec<DefinitionsItemFunctionArg>>;
+    // fn extract_rets(&self) -> Vec<TypeArgument>;
+    // fn extract_rets_normalized(
+    //     &self,
+    //     type_lookup_table: &HashMap<String, TypeInfo>,
+    // ) -> Option<Vec<DefinitionsItemFunctionRet>>;
 }
 
 impl TypeInfoExt for TypeInfo {

@@ -200,55 +208,32 @@ impl TypeInfoExt for TypeInfo {
     fn extract_args_normalized(
         &self,
         type_lookup_table: &HashMap<String, TypeInfo>,
-    ) -> Option<Vec<String>> {
+    ) -> Option<Vec<DefinitionsItemFunctionArg>> {
         if self.is_fn() {
-            let separator = format!(" {} ", Symbol::Pipe);
             let args_stringified_not_normalized = self
                 .extract_args()
                 .iter()
                 .map(|type_arg| {
-                    type_arg
-                        .type_info()
-                        .stringify_simple(Some(self), type_lookup_table)
+                    (
+                        type_arg
+                            .name()
+                            .map_or_else(|| "_".to_string(), |n| n.0.to_string()),
+                        type_arg.type_info().to_string(),
+                        type_arg
+                            .type_info()
+                            .stringify_simple(Some(self), type_lookup_table),
+                    )
                 })
                 .collect::<Vec<_>>();
-            let mut args_stringified = Vec::new();
-            for arg_string in args_stringified_not_normalized {
-                let arg_parts = arg_string.split(&separator).collect::<Vec<_>>();
-                // Check if we got any optional arg, if so then the entire possible
-                // union of args will be optional when merged together / normalized
-                let is_optional = arg_parts
-                    .iter()
-                    .any(|part| part == &"nil" || part.ends_with('?'));
-                // Get rid of any nils or optional markers since we keep track of it above
-                let mut arg_parts_no_nils = arg_parts
-                    .iter()
-                    .filter_map(|arg_part| {
-                        if arg_part == &"nil" {
-                            None
-                        } else {
-                            Some(arg_part.trim_end_matches('?'))
-                        }
-                    })
-                    .collect::<Vec<_>>();
-                arg_parts_no_nils.sort_unstable(); // Sort the args to be able to dedup
-                arg_parts_no_nils.dedup(); // Deduplicate types that are the exact same shape
-                if is_optional {
-                    if arg_parts_no_nils.len() > 1 {
-                        // A union of args that is nillable should be enclosed in parens to make
-                        // it more clear that the entire arg is nillable and not just the last type
-                        args_stringified.push(format!("({})?", arg_parts_no_nils.join(&separator)));
-                    } else {
-                        // Just one nillable arg, does not need any parens
-                        args_stringified.push(format!("{}?", arg_parts_no_nils.first().unwrap()));
-                    }
-                } else if arg_parts_no_nils.len() > 1 {
-                    args_stringified.push(arg_parts_no_nils.join(&separator).to_string());
-                } else {
-                    args_stringified.push((*arg_parts_no_nils.first().unwrap()).to_string());
-                }
+            let mut args = Vec::new();
+            for (arg_name, arg_typedef, arg_typedef_simplified) in args_stringified_not_normalized {
+                args.push(DefinitionsItemFunctionArg::new(
+                    arg_name,
+                    arg_typedef,
+                    normalize_type(&arg_typedef_simplified),
+                ));
             }
-            Some(args_stringified)
+            Some(args)
         } else {
             None
         }

@@ -311,3 +296,38 @@ fn merge_type_argument_vecs(
     }
     result
 }
+
+fn normalize_type(simplified: &str) -> String {
+    let separator = format!(" {} ", Symbol::Pipe);
+    let arg_parts = simplified.split(&separator).collect::<Vec<_>>();
+    // Check if we got any optional arg, if so then the entire possible
+    // union of args will be optional when merged together / normalized
+    let is_optional = arg_parts
+        .iter()
+        .any(|part| part == &"nil" || part.ends_with('?'));
+    // Get rid of any nils or optional markers since we keep track of it above
+    let mut arg_parts_no_nils = arg_parts
+        .iter()
+        .filter_map(|arg_part| {
+            if arg_part == &"nil" {
+                None
+            } else {
+                Some(arg_part.trim_end_matches('?'))
+            }
+        })
+        .collect::<Vec<_>>();
+    arg_parts_no_nils.sort_unstable(); // Sort the args to be able to dedup
+    arg_parts_no_nils.dedup(); // Deduplicate types that are the exact same shape
+    if is_optional {
+        if arg_parts_no_nils.len() > 1 {
+            // A union of args that is nillable should be enclosed in parens to make
+            // it more clear that the entire arg is nillable and not just the last type
+            format!("({})?", arg_parts_no_nils.join(&separator))
+        } else {
+            // Just one nillable arg, does not need any parens
+            format!("{}?", arg_parts_no_nils.first().unwrap())
+        }
+    } else {
+        arg_parts_no_nils.join(&separator)
+    }
+}
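
The extracted normalize_type helper collapses a simplified union into at most one trailing optional marker. A self-contained sketch of the same normalization behaviour, hard-coding the " | " separator that Symbol::Pipe is assumed to render as (the inputs in main are hypothetical, not taken from the repo's typedefs):

// Sketch of the union normalization: drop `nil` and per-part `?` markers,
// sort and dedupe the remaining parts, then re-apply a single trailing `?`
// (parenthesized for multi-part unions) if anything was optional.
fn normalize_type(simplified: &str) -> String {
    let separator = " | ";
    let parts = simplified.split(separator).collect::<Vec<_>>();
    let is_optional = parts.iter().any(|part| *part == "nil" || part.ends_with('?'));
    let mut no_nils = parts
        .iter()
        .filter_map(|part| {
            if *part == "nil" {
                None
            } else {
                Some(part.trim_end_matches('?'))
            }
        })
        .collect::<Vec<_>>();
    no_nils.sort_unstable();
    no_nils.dedup();
    if is_optional {
        if no_nils.len() > 1 {
            format!("({})?", no_nils.join(separator))
        } else {
            format!("{}?", no_nils.first().unwrap())
        }
    } else {
        no_nils.join(separator)
    }
}

fn main() {
    assert_eq!(normalize_type("string | nil"), "string?");
    assert_eq!(normalize_type("number | string | nil"), "(number | string)?");
    assert_eq!(normalize_type("string | string"), "string");
    println!("ok");
}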

@@ -78,7 +78,7 @@ pub async fn generate_from_type_definitions(contents: &str) -> Result<()> {
             .with_extension("md");
         let mut contents = String::new();
         write!(contents, "{GENERATED_COMMENT_TAG}\n\n")?;
-        generate_markdown_documentation(&mut contents, &category_item)?;
+        generate_markdown_documentation(&mut contents, &category_item, 0)?;
         files_to_write.push((path, post_process_docs(contents)));
     }
     // Write all dirs and files only when we know generation was successful

@@ -113,7 +113,11 @@ fn get_name(item: &DefinitionsItem) -> Result<String> {
         .context("Definitions item is missing a name")
 }
 
-fn generate_markdown_documentation(contents: &mut String, item: &DefinitionsItem) -> Result<()> {
+fn generate_markdown_documentation(
+    contents: &mut String,
+    item: &DefinitionsItem,
+    depth: usize,
+) -> Result<()> {
     match item.kind() {
         DefinitionsItemKind::Table
         | DefinitionsItemKind::Property

@@ -126,14 +130,35 @@ fn generate_markdown_documentation(contents: &mut String, item: &DefinitionsItem
             )?;
         }
         DefinitionsItemKind::Description => {
+            let desc = item.get_value().context("Description is missing a value")?;
             write!(
                 contents,
                 "\n{}\n",
-                item.get_value().context("Description is missing a value")?
+                if depth >= 2 {
+                    // HACK: We know our typedefs are formatted like this and
+                    // it looks nicer to have this bolding instead of two
+                    // headers using "###" in the function definition
+                    desc.replace("### Example usage", "**Example usage:**")
+                } else {
+                    desc.to_string()
+                }
             )?;
         }
         _ => {}
     }
+    if item.kind().is_function() && !item.args().is_empty() {
+        let args = item
+            .args()
+            .iter()
+            .map(|arg| format!("{}: {}", arg.name, arg.typedef))
+            .collect::<Vec<_>>();
+        write!(
+            contents,
+            "\n```lua\nfunction {}({})\n```\n",
+            item.get_name().unwrap_or("_"),
+            args.join(", ")
+        )?;
+    }
     let descriptions = item
         .children()
         .iter()
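
With per-argument names and types now available, the generator emits a fenced Lua signature block for each function. A rough, self-contained sketch of that output shape (the readFile name and its argument list are hypothetical examples, not the repo's actual items):

// Sketch of the signature block written for functions:
// a ```lua fence containing `function name(arg: type, ...)`.
fn signature_block(name: &str, args: &[(&str, &str)]) -> String {
    let rendered = args
        .iter()
        .map(|&(arg_name, typedef)| format!("{arg_name}: {typedef}"))
        .collect::<Vec<_>>()
        .join(", ");
    format!("\n```lua\nfunction {name}({rendered})\n```\n")
}

fn main() {
    // Prints a fenced block for a hypothetical `readFile(path: string)` item
    print!("{}", signature_block("readFile", &[("path", "string")]));
}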

@@ -150,19 +175,19 @@ fn generate_markdown_documentation(contents: &mut String, item: &DefinitionsItem
         .filter(|child| child.is_function())
         .collect::<Vec<_>>();
     for description in descriptions {
-        generate_markdown_documentation(contents, description)?;
+        generate_markdown_documentation(contents, description, depth + 1)?;
     }
     if !properties.is_empty() {
         write!(contents, "\n\n---\n\n## Properties\n\n")?;
     }
     for property in properties {
-        generate_markdown_documentation(contents, property)?;
+        generate_markdown_documentation(contents, property, depth + 1)?;
     }
     if !functions.is_empty() {
         write!(contents, "\n\n---\n\n## Functions\n\n")?;
     }
     for function in functions {
-        generate_markdown_documentation(contents, function)?;
+        generate_markdown_documentation(contents, function, depth + 1)?;
     }
     Ok(())
 }

@@ -115,7 +115,8 @@ fn doc_item_to_selene_yaml_mapping(item: &DefinitionsItem) -> Result<YamlMapping
         );
     }
     let mut args = YamlSequence::new();
-    for arg_type in item.arg_types() {
+    for arg in item.args() {
+        let arg_type: &str = arg.typedef_simple.as_ref();
         let mut arg_mapping = YamlMapping::new();
         let (type_str, mut type_opt) = match arg_type.strip_suffix('?') {
             Some(stripped) => (stripped, true),

@@ -42,7 +42,7 @@ impl TaskSchedulerResumeExt for TaskScheduler<'_> {
     */
     async fn resume_queue(&self) -> TaskSchedulerState {
         let current = TaskSchedulerState::new(self);
-        let result = if current.num_blocking > 0 {
+        if current.num_blocking > 0 {
             // 1. Blocking tasks
             resume_next_blocking_task(self, None)
         } else if current.num_futures > 0 || current.num_background > 0 {

@@ -57,8 +57,7 @@ impl TaskSchedulerResumeExt for TaskScheduler<'_> {
             // a busy loop to prevent cpu usage from going to 100%
             sleep(Duration::from_millis(1)).await;
             TaskSchedulerState::new(self)
-        };
-        result
+        }
     }
 }
 
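
The scheduler tweak drops the intermediate result binding: in Rust an if/else chain is itself an expression, so it can sit directly in tail position and be returned. A tiny self-contained illustration of that idiom (the names and values here are hypothetical, not the scheduler's actual fields):

// An if/else chain used directly as the function's tail expression,
// instead of binding it to an intermediate `result` variable.
fn pick(num_blocking: usize, num_futures: usize) -> &'static str {
    if num_blocking > 0 {
        "blocking"
    } else if num_futures > 0 {
        "futures"
    } else {
        "idle"
    }
}

fn main() {
    println!("{}", pick(1, 0));
}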