mirror of https://github.com/lune-org/lune.git
Support referenced types in parser type stringification

parent 77623264ae
commit a96745c292

2 changed files with 97 additions and 42 deletions
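The idea behind the change, as a minimal standalone sketch: stringification now receives a lookup table of top-level type declarations, so a basic token that names a declared type is expanded into the stringified form of the type it references. The `SimpleTypeInfo` enum and the `Vectors` type below are made up for illustration and stand in for full_moon's `TypeInfo`.

use std::collections::HashMap;

// Hypothetical, heavily simplified stand-in for full_moon's `TypeInfo`,
// used only to illustrate the lookup-table idea in this commit.
enum SimpleTypeInfo {
    Basic(String),                 // e.g. `number`, or a referenced name like `Vectors`
    Optional(Box<SimpleTypeInfo>), // e.g. `Vectors?`
    Array(Box<SimpleTypeInfo>),    // e.g. `{ number }`
}

fn stringify_simple(
    typ: &SimpleTypeInfo,
    type_lookup_table: &HashMap<String, SimpleTypeInfo>,
) -> String {
    match typ {
        // A basic token may be a reference to a top-level declared type; if so,
        // stringify the referenced definition instead of the bare name.
        // Note: no guard against self-referential types, matching the plain
        // lookup visible in the diff below.
        SimpleTypeInfo::Basic(name) => match type_lookup_table.get(name) {
            Some(referenced_typ) => stringify_simple(referenced_typ, type_lookup_table),
            None => name.clone(),
        },
        SimpleTypeInfo::Optional(base) => {
            format!("{}?", stringify_simple(base, type_lookup_table))
        }
        SimpleTypeInfo::Array(inner) => {
            format!("{{ {} }}", stringify_simple(inner, type_lookup_table))
        }
    }
}

fn main() {
    // Pretend the parser collected `type Vectors = { number }` into its table.
    let mut type_lookup_table = HashMap::new();
    type_lookup_table.insert(
        "Vectors".to_string(),
        SimpleTypeInfo::Array(Box::new(SimpleTypeInfo::Basic("number".to_string()))),
    );
    // An argument typed as `Vectors?` now stringifies through the referenced type.
    let arg = SimpleTypeInfo::Optional(Box::new(SimpleTypeInfo::Basic("Vectors".to_string())));
    assert_eq!(stringify_simple(&arg, &type_lookup_table), "{ number }?");
}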
@@ -1,3 +1,5 @@
+use std::collections::{BTreeMap, HashMap};
+
 use anyhow::{Context, Result};
 use full_moon::{
     ast::{
@@ -20,16 +22,19 @@ struct DefinitionsParserItem {
     type_info: TypeInfo,
 }

-#[derive(Debug, Default, Clone)]
+#[derive(Debug, Clone)]
 pub struct DefinitionsParser {
-    found_top_level_items: Vec<DefinitionsParserItem>,
+    found_top_level_items: BTreeMap<String, DefinitionsParserItem>,
+    found_top_level_types: HashMap<String, TypeInfo>,
     found_top_level_declares: Vec<String>,
 }

 impl DefinitionsParser {
     pub fn new() -> Self {
         Self {
-            ..Default::default()
+            found_top_level_items: BTreeMap::new(),
+            found_top_level_types: HashMap::new(),
+            found_top_level_declares: Vec::new(),
         }
     }

@@ -62,7 +67,8 @@ impl DefinitionsParser {
             .map(|cap| cap[1].to_string())
             .collect();
         // Parse contents into top-level parser items for later use
-        let mut found_top_level_items = Vec::new();
+        let mut found_top_level_items = BTreeMap::new();
+        let mut found_top_level_types = HashMap::new();
         let ast =
             full_moon::parse(&resulting_contents).context("Failed to parse type definitions")?;
         for stmt in ast.nodes().stmts() {
@@ -73,15 +79,21 @@ impl DefinitionsParser {
                 Stmt::TypeDeclaration(typ) => Some((typ, typ.type_token())),
                 _ => None,
             } {
-                found_top_level_items.push(DefinitionsParserItem {
-                    name: declaration.type_name().token().to_string(),
-                    comment: find_token_moonwave_comment(token_reference),
-                    type_info: declaration.type_definition().clone(),
-                });
+                let name = declaration.type_name().token().to_string();
+                found_top_level_items.insert(
+                    name.clone(),
+                    DefinitionsParserItem {
+                        name: name.clone(),
+                        comment: find_token_moonwave_comment(token_reference),
+                        type_info: declaration.type_definition().clone(),
+                    },
+                );
+                found_top_level_types.insert(name, declaration.type_definition().clone());
             }
         }
         // Store results
         self.found_top_level_items = found_top_level_items;
+        self.found_top_level_types = found_top_level_types;
         self.found_top_level_declares = found_declares;
         Ok(())
     }
@@ -96,7 +108,10 @@ impl DefinitionsParser {
         if let Some(comment) = item.comment {
            builder = builder.with_children(&parse_moonwave_style_comment(&comment));
         }
-        if let Some(args) = item.type_info.extract_args_normalized() {
+        if let Some(args) = item
+            .type_info
+            .extract_args_normalized(&self.found_top_level_types)
+        {
             builder = builder.with_arg_types(&args);
         }
         if let TypeInfo::Table { fields, .. } = item.type_info {
@@ -122,11 +137,11 @@ impl DefinitionsParser {
     */
     #[allow(clippy::unnecessary_wraps)]
     pub fn drain(&mut self) -> Result<Vec<DefinitionsItem>> {
-        let mut top_level_items = self.found_top_level_items.drain(..).collect::<Vec<_>>();
-        let results = top_level_items
-            .drain(..)
-            .map(|visitor_item| self.convert_parser_item_into_doc_item(visitor_item))
-            .collect();
+        let mut results = Vec::new();
+        for top_level_item in self.found_top_level_items.values() {
+            results.push(self.convert_parser_item_into_doc_item(top_level_item.clone()));
+        }
+        self.found_top_level_items = BTreeMap::new();
         self.found_top_level_declares = Vec::new();
         Ok(results)
     }
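The switch from `Vec` to `BTreeMap` above is not explained by the commit itself; a plausible reading is that `drain` now iterates `.values()`, and `BTreeMap` yields those in key (type name) order, so the emitted doc items stay deterministic and name-sorted, while the companion `HashMap` is only ever queried by name. A small sketch of that distinction, with made-up item names:

use std::collections::{BTreeMap, HashMap};

fn main() {
    // BTreeMap iterates in key order, so draining doc items via `.values()`
    // is deterministic and sorted by type name, regardless of insertion order.
    let mut found_top_level_items = BTreeMap::new();
    found_top_level_items.insert("Vectors", "item for `type Vectors = ...`");
    found_top_level_items.insert("Anchor", "item for `type Anchor = ...`");
    let names: Vec<_> = found_top_level_items.keys().copied().collect();
    assert_eq!(names, vec!["Anchor", "Vectors"]);

    // The companion HashMap is only ever queried by name during
    // stringification, so its unspecified iteration order never matters.
    let mut found_top_level_types = HashMap::new();
    found_top_level_types.insert("Vectors", "{ number }");
    assert_eq!(found_top_level_types.get("Vectors"), Some(&"{ number }"));
}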
@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 use full_moon::{
     ast::types::{TypeArgument, TypeInfo},
     tokenizer::{Symbol, Token, TokenReference, TokenType},
@@ -8,9 +10,16 @@ use super::kind::DefinitionsItemKind;
 pub(crate) trait TypeInfoExt {
     fn is_fn(&self) -> bool;
     fn parse_definitions_kind(&self) -> DefinitionsItemKind;
-    fn stringify_simple(&self, parent_typ: Option<&TypeInfo>) -> String;
+    fn stringify_simple(
+        &self,
+        parent_typ: Option<&TypeInfo>,
+        type_lookup_table: &HashMap<String, TypeInfo>,
+    ) -> String;
     fn extract_args(&self, base: Vec<TypeArgument>) -> Vec<TypeArgument>;
-    fn extract_args_normalized(&self) -> Option<Vec<String>>;
+    fn extract_args_normalized(
+        &self,
+        type_lookup_table: &HashMap<String, TypeInfo>,
+    ) -> Option<Vec<String>>;
 }

 impl TypeInfoExt for TypeInfo {
@@ -90,44 +99,68 @@ impl TypeInfoExt for TypeInfo {
        * `{ TypeName }`
        * `"string-literal"`
    */
-    fn stringify_simple(&self, parent_typ: Option<&TypeInfo>) -> String {
+    fn stringify_simple(
+        &self,
+        parent_typ: Option<&TypeInfo>,
+        type_lookup_table: &HashMap<String, TypeInfo>,
+    ) -> String {
         match self {
             TypeInfo::Array { type_info, .. } => {
-                format!("{{ {} }}", type_info.as_ref().stringify_simple(Some(self)))
+                format!(
+                    "{{ {} }}",
+                    type_info
+                        .as_ref()
+                        .stringify_simple(Some(self), type_lookup_table)
+                )
             }
-            TypeInfo::Basic(tok) => match parent_typ {
-                Some(TypeInfo::Callback { generics, .. }) => {
-                    if let Some(generics) = generics {
+            TypeInfo::Basic(tok) => {
+                let tok_str = tok.token().to_string();
+                let mut any_str = None;
                 // If the function that contains this arg has generic and a
                 // generic is the same as this token, we stringify it as any
-                        if generics
-                            .generics()
-                            .iter()
-                            .any(|g| g.to_string() == tok.token().to_string())
-                        {
-                            "any".to_string()
-                        } else {
-                            tok.token().to_string()
-                        }
-                    } else {
-                        tok.token().to_string()
-                    }
-                }
-                _ => tok.token().to_string(),
-            },
+                if let Some(TypeInfo::Callback {
+                    generics: Some(callback_generics),
+                    ..
+                }) = parent_typ
+                {
+                    if callback_generics
+                        .generics()
+                        .iter()
+                        .any(|g| g.to_string() == tok_str)
+                    {
+                        any_str = Some("any".to_string());
+                    }
+                }
+                // Also check if we got a referenced type, meaning that it
+                // exists in the lookup table of global types passed to us
+                if let Some(any_str) = any_str {
+                    any_str
+                } else if let Some(referenced_typ) = type_lookup_table.get(&tok_str) {
+                    referenced_typ.stringify_simple(None, type_lookup_table)
+                } else {
+                    tok_str
+                }
+            }
             TypeInfo::String(str) => str.token().to_string(),
             TypeInfo::Boolean(_) => "boolean".to_string(),
             TypeInfo::Callback { .. } => "function".to_string(),
             TypeInfo::Optional { base, .. } => {
-                format!("{}?", base.as_ref().stringify_simple(Some(self)))
+                format!(
+                    "{}?",
+                    base.as_ref()
+                        .stringify_simple(Some(self), type_lookup_table)
+                )
             }
             TypeInfo::Table { .. } => "table".to_string(),
             TypeInfo::Union { left, right, .. } => {
                 format!(
                     "{} {} {}",
-                    left.as_ref().stringify_simple(Some(self)),
+                    left.as_ref()
+                        .stringify_simple(Some(self), type_lookup_table),
                     Symbol::Pipe,
-                    right.as_ref().stringify_simple(Some(self))
+                    right
+                        .as_ref()
+                        .stringify_simple(Some(self), type_lookup_table)
                 )
             }
             // TODO: Stringify custom table types properly, these show up as basic tokens
@@ -156,13 +189,20 @@ impl TypeInfoExt for TypeInfo {
         }
     }

-    fn extract_args_normalized(&self) -> Option<Vec<String>> {
+    fn extract_args_normalized(
+        &self,
+        type_lookup_table: &HashMap<String, TypeInfo>,
+    ) -> Option<Vec<String>> {
         if self.is_fn() {
             let separator = format!(" {} ", Symbol::Pipe);
             let args_stringified_not_normalized = self
                 .extract_args(Vec::new())
                 .iter()
-                .map(|type_arg| type_arg.type_info().stringify_simple(Some(self)))
+                .map(|type_arg| {
+                    type_arg
+                        .type_info()
+                        .stringify_simple(Some(self), type_lookup_table)
+                })
                 .collect::<Vec<_>>();
             let mut args_stringified = Vec::new();
             for arg_string in args_stringified_not_normalized {
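A hedged sketch of how the extended trait might be driven from inside the crate (the trait stays `pub(crate)`, so this is not usable as an external example). It reuses only calls that appear in this diff; the function name `stringify_declared_types` and the import path for `TypeInfoExt` are assumptions.

use std::collections::HashMap;

use anyhow::{Context, Result};
use full_moon::ast::Stmt;

// Assumes the trait is in scope inside the crate, e.g.
// `use super::type_info_ext::TypeInfoExt;` (module path assumed).
fn stringify_declared_types(definitions_source: &str) -> Result<Vec<String>> {
    // Collect every top-level `type Name = ...` declaration into a lookup
    // table, mirroring what `DefinitionsParser` now stores in
    // `found_top_level_types`.
    let ast = full_moon::parse(definitions_source).context("Failed to parse type definitions")?;
    let mut type_lookup_table = HashMap::new();
    for stmt in ast.nodes().stmts() {
        if let Stmt::TypeDeclaration(declaration) = stmt {
            type_lookup_table.insert(
                declaration.type_name().token().to_string(),
                declaration.type_definition().clone(),
            );
        }
    }
    // Stringify each declared type, resolving references through the table.
    Ok(type_lookup_table
        .values()
        .map(|typ| typ.stringify_simple(None, &type_lookup_table))
        .collect())
}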