Clean up typedefs parser

Filip Tibell 2023-02-22 11:31:58 +01:00
parent 09db6d5d8a
commit 7c25c3319d
4 changed files with 300 additions and 263 deletions

mod.rs

@@ -5,9 +5,11 @@ mod moonwave;
 mod parser;
 mod tag;
 mod tree;
+mod type_info_ext;
 
 pub use item::DefinitionsItem;
 pub use kind::DefinitionsItemKind;
-pub use parser::PIPE_SEPARATOR;
 pub use tag::DefinitionsItemTag;
 pub use tree::DefinitionsTree;
+
+pub const PIPE_SEPARATOR: &str = " | ";

parser.rs

@@ -1,7 +1,7 @@
 use anyhow::{Context, Result};
 use full_moon::{
     ast::{
-        types::{TypeArgument, TypeFieldKey, TypeInfo},
+        types::{TypeFieldKey, TypeInfo},
         Stmt,
     },
     tokenizer::{TokenReference, TokenType},
@@ -9,12 +9,10 @@ use full_moon::{
 use regex::Regex;
 
 use super::{
-    builder::DefinitionsItemBuilder, item::DefinitionsItem, kind::DefinitionsItemKind,
-    moonwave::parse_moonwave_style_comment,
+    builder::DefinitionsItemBuilder, item::DefinitionsItem, moonwave::parse_moonwave_style_comment,
+    type_info_ext::TypeInfoExt,
 };
 
-pub const PIPE_SEPARATOR: &str = " | ";
-
 #[derive(Debug, Clone)]
 struct DefinitionsParserItem {
     name: String,
@@ -22,84 +20,33 @@ struct DefinitionsParserItem {
     type_info: TypeInfo,
 }
 
-impl DefinitionsParserItem {
-    fn into_doc_item(self, type_definition_declares: &Vec<String>) -> DefinitionsItem {
-        let mut builder = DefinitionsItemBuilder::new()
-            .with_kind(DefinitionsItemKind::from(&self.type_info))
-            .with_name(&self.name);
-        if type_definition_declares.contains(&self.name) {
-            builder = builder.as_exported();
-        }
-        if let Some(comment) = self.comment {
-            builder = builder.with_children(&parse_moonwave_style_comment(&comment));
-        }
-        if let Some(args) = try_extract_normalized_function_args(&self.type_info) {
-            builder = builder.with_arg_types(&args);
-        }
-        if let TypeInfo::Table { fields, .. } = self.type_info {
-            for field in fields.iter() {
-                if let TypeFieldKey::Name(name) = field.key() {
-                    builder = builder.with_child(
-                        Self {
-                            name: name.token().to_string(),
-                            comment: find_token_moonwave_comment(name),
-                            type_info: field.value().clone(),
-                        }
-                        .into_doc_item(type_definition_declares),
-                    );
-                }
-            }
-        }
-        builder.build().unwrap()
-    }
-}
-
-impl From<&TypeInfo> for DefinitionsItemKind {
-    fn from(value: &TypeInfo) -> Self {
-        match value {
-            TypeInfo::Array { .. } | TypeInfo::Table { .. } => DefinitionsItemKind::Table,
-            TypeInfo::Basic(_) | TypeInfo::String(_) => DefinitionsItemKind::Property,
-            TypeInfo::Optional { base, .. } => Self::from(base.as_ref()),
-            TypeInfo::Tuple { types, .. } => {
-                let mut kinds = types.iter().map(Self::from).collect::<Vec<_>>();
-                let kinds_all_the_same = kinds.windows(2).all(|w| w[0] == w[1]);
-                if kinds_all_the_same && !kinds.is_empty() {
-                    kinds.pop().unwrap()
-                } else {
-                    unimplemented!(
-                        "Missing support for tuple with differing types in type definitions parser",
-                    )
-                }
-            }
-            TypeInfo::Union { left, right, .. } | TypeInfo::Intersection { left, right, .. } => {
-                let kind_left = Self::from(left.as_ref());
-                let kind_right = Self::from(right.as_ref());
-                if kind_left == kind_right {
-                    kind_left
-                } else {
-                    unimplemented!(
-                        "Missing support for union/intersection with differing types in type definitions parser",
-                    )
-                }
-            }
-            typ if type_info_is_fn(typ) => DefinitionsItemKind::Function,
-            typ => unimplemented!(
-                "Missing support for TypeInfo in type definitions parser:\n{}",
-                typ.to_string()
-            ),
-        }
-    }
-}
-
-fn parse_type_definitions_declares(contents: &str) -> (String, Vec<String>) {
+#[derive(Debug, Default, Clone)]
+pub struct DefinitionsParser {
+    found_top_level_items: Vec<DefinitionsParserItem>,
+    found_top_level_declares: Vec<String>,
+}
+
+impl DefinitionsParser {
+    pub fn new() -> Self {
+        Self {
+            ..Default::default()
+        }
+    }
+
+    pub fn parse<S>(&mut self, contents: S) -> Result<()>
+    where
+        S: AsRef<str>,
+    {
         // TODO: Properly handle the "declare class" syntax, for now we just skip it
-        let mut no_class_declares = contents.to_string();
+        let mut no_class_declares = contents.as_ref().to_string();
         while let Some(dec) = no_class_declares.find("\ndeclare class") {
             let end = no_class_declares.find("\nend").unwrap();
             let before = &no_class_declares[0..dec];
             let after = &no_class_declares[end + 4..];
             no_class_declares = format!("{before}{after}");
         }
+        // Replace declares with export type syntax that can be parsed by full_moon,
+        // find all declare statements and save declared names for later parsing
         let regex_declare = Regex::new(r#"declare (\w+): "#).unwrap();
         let resulting_contents = regex_declare
             .replace_all(&no_class_declares, "export type $1 =")
@@ -108,18 +55,10 @@ fn parse_type_definitions_declares(contents: &str) -> (String, Vec<String>) {
             .captures_iter(&no_class_declares)
             .map(|cap| cap[1].to_string())
             .collect();
-    (resulting_contents, found_declares)
-}
-
-pub fn parse_type_definitions_into_doc_items<S>(contents: S) -> Result<Vec<DefinitionsItem>>
-where
-    S: AsRef<str>,
-{
-    let mut found_top_level_items = Vec::new();
-    let (type_definition_contents, type_definition_declares) =
-        parse_type_definitions_declares(contents.as_ref());
-    let ast =
-        full_moon::parse(&type_definition_contents).context("Failed to parse type definitions")?;
+        // Parse contents into top-level parser items for later use
+        let mut found_top_level_items = Vec::new();
+        let ast =
+            full_moon::parse(&resulting_contents).context("Failed to parse type definitions")?;
         for stmt in ast.nodes().stmts() {
             if let Some((declaration, token_reference)) = match stmt {
                 Stmt::ExportedTypeDeclaration(exp) => {
@@ -135,164 +74,49 @@ where
                 });
             }
         }
-    Ok(found_top_level_items
-        .drain(..)
-        .map(|visitor_item| visitor_item.into_doc_item(&type_definition_declares))
-        .collect())
-}
-
-fn simple_stringify_type_info(typ: &TypeInfo, parent_typ: Option<&TypeInfo>) -> String {
-    match typ {
-        TypeInfo::Array { type_info, .. } => {
-            format!("{{ {} }}", simple_stringify_type_info(type_info, Some(typ)))
-        }
-        TypeInfo::Basic(tok) => match parent_typ {
-            Some(TypeInfo::Callback { generics, .. }) => {
-                if let Some(generics) = generics {
-                    // If the function that contains this arg has generic and a
-                    // generic is the same as this token, we stringify it as any
-                    if generics
-                        .generics()
-                        .iter()
-                        .any(|g| g.to_string() == tok.token().to_string())
-                    {
-                        "any".to_string()
-                    } else {
-                        tok.token().to_string()
-                    }
-                } else {
-                    tok.token().to_string()
-                }
-            }
-            _ => tok.token().to_string(),
-        },
-        TypeInfo::String(str) => str.token().to_string(),
-        TypeInfo::Boolean(_) => "boolean".to_string(),
-        TypeInfo::Callback { .. } => "function".to_string(),
-        TypeInfo::Optional { base, .. } => {
-            format!("{}?", simple_stringify_type_info(base, Some(typ)))
-        }
-        TypeInfo::Table { .. } => "table".to_string(),
-        TypeInfo::Union { left, right, .. } => {
-            format!(
-                "{}{PIPE_SEPARATOR}{}",
-                simple_stringify_type_info(left, Some(typ)),
-                simple_stringify_type_info(right, Some(typ))
-            )
-        }
-        // TODO: Stringify custom table types properly, these show up as basic tokens
-        // and we should be able to look up the real type using found top level items
-        _ => "...".to_string(),
-    }
-}
-
-fn type_info_is_fn(typ: &TypeInfo) -> bool {
-    match typ {
-        TypeInfo::Callback { .. } => true,
-        TypeInfo::Tuple { types, .. } => types.iter().all(type_info_is_fn),
-        TypeInfo::Union { left, right, .. } | TypeInfo::Intersection { left, right, .. } => {
-            type_info_is_fn(left) || type_info_is_fn(right)
-        }
-        _ => false,
-    }
-}
-
-fn type_info_extract_args<'a>(
-    typ: &'a TypeInfo,
-    base: Vec<Vec<&'a TypeArgument>>,
-) -> Vec<Vec<&'a TypeArgument>> {
-    match typ {
-        TypeInfo::Callback { arguments, .. } => {
-            let mut result = base.clone();
-            result.push(arguments.iter().collect::<Vec<_>>());
-            result
-        }
-        TypeInfo::Tuple { types, .. } => type_info_extract_args(
-            types.iter().next().expect("Function tuple type was empty"),
-            base.clone(),
-        ),
-        TypeInfo::Union { left, right, .. } | TypeInfo::Intersection { left, right, .. } => {
-            let mut result = base.clone();
-            result = type_info_extract_args(left, result.clone());
-            result = type_info_extract_args(right, result.clone());
-            result
-        }
-        _ => base,
-    }
-}
-
-fn try_extract_normalized_function_args(typ: &TypeInfo) -> Option<Vec<String>> {
-    if type_info_is_fn(typ) {
-        let mut type_args_multi = type_info_extract_args(typ, Vec::new());
-        match type_args_multi.len() {
-            0 => None,
-            1 => Some(
-                // We got a normal function with some known list of args, and we will
-                // stringify the arg types into simple ones such as "function", "table", ..
-                type_args_multi
-                    .pop()
-                    .unwrap()
-                    .iter()
-                    .map(|type_arg| simple_stringify_type_info(type_arg.type_info(), Some(typ)))
-                    .collect(),
-            ),
-            _ => {
-                // We got a union or intersection function, meaning it has
-                // several different overloads that accept different args
-                let mut unified_args = Vec::new();
-                for index in 0..type_args_multi
-                    .iter()
-                    .fold(0, |acc, type_args| acc.max(type_args.len()))
-                {
-                    // Gather function arg type strings for all
-                    // of the different variants of this function
-                    let mut type_arg_strings = type_args_multi
-                        .iter()
-                        .filter_map(|type_args| type_args.get(index))
-                        .map(|type_arg| simple_stringify_type_info(type_arg.type_info(), Some(typ)))
-                        .collect::<Vec<_>>();
-                    if type_arg_strings.len() < type_args_multi.len() {
-                        for _ in type_arg_strings.len()..type_args_multi.len() {
-                            type_arg_strings.push("nil".to_string());
-                        }
-                    }
-                    // Type arg strings may themselves be stringified to something like number | string so we
-                    // will split that out to be able to handle it better with the following unification process
-                    let mut type_arg_strings_sep = Vec::new();
-                    for type_arg_string in type_arg_strings.drain(..) {
-                        for typ_arg_string_inner in type_arg_string.split(PIPE_SEPARATOR) {
-                            type_arg_strings_sep.push(typ_arg_string_inner.to_string());
-                        }
-                    }
-                    // Find out if we have any nillable type, to know if we
-                    // should make the entire arg type union nillable or not
-                    let has_any_optional = type_arg_strings_sep
-                        .iter()
-                        .any(|s| s == "nil" || s.ends_with('?'));
-                    // Filter out any nils or optional markers (?),
-                    // we will add this back at the end if necessary
-                    let mut type_arg_strings_non_nil = type_arg_strings_sep
-                        .iter()
-                        .filter(|s| *s != "nil")
-                        .map(|s| s.trim_end_matches('?').to_string())
-                        .collect::<Vec<_>>();
-                    type_arg_strings_non_nil.sort(); // Need to sort for dedup
-                    type_arg_strings_non_nil.dedup(); // Dedup to get rid of redundant types such as string | string
-                    unified_args.push(if has_any_optional {
-                        if type_arg_strings_non_nil.len() == 1 {
-                            format!("{}?", type_arg_strings_non_nil.pop().unwrap())
-                        } else {
-                            format!("({})?", type_arg_strings_non_nil.join(PIPE_SEPARATOR))
-                        }
-                    } else {
-                        type_arg_strings_non_nil.join(PIPE_SEPARATOR)
-                    });
-                }
-                Some(unified_args)
-            }
-        }
-    } else {
-        None
-    }
-}
+        // Store results
+        self.found_top_level_items = found_top_level_items;
+        self.found_top_level_declares = found_declares;
+        Ok(())
+    }
+
+    fn convert_parser_item_into_doc_item(&self, item: DefinitionsParserItem) -> DefinitionsItem {
+        let mut builder = DefinitionsItemBuilder::new()
+            .with_kind(item.type_info.to_definitions_kind())
+            .with_name(&item.name);
+        if self.found_top_level_declares.contains(&item.name) {
+            builder = builder.as_exported();
+        }
+        if let Some(comment) = item.comment {
+            builder = builder.with_children(&parse_moonwave_style_comment(&comment));
+        }
+        if let Some(args) = item.type_info.extract_args_normalized() {
+            builder = builder.with_arg_types(&args);
+        }
+        if let TypeInfo::Table { fields, .. } = item.type_info {
+            for field in fields.iter() {
+                if let TypeFieldKey::Name(name) = field.key() {
+                    builder = builder.with_child(self.convert_parser_item_into_doc_item(
+                        DefinitionsParserItem {
+                            name: name.token().to_string(),
+                            comment: find_token_moonwave_comment(name),
+                            type_info: field.value().clone(),
+                        },
+                    ));
+                }
+            }
+        }
+        builder.build().unwrap()
+    }
+
+    #[allow(clippy::unnecessary_wraps)]
+    pub fn into_definition_items(mut self) -> Result<Vec<DefinitionsItem>> {
+        let mut top_level_items = self.found_top_level_items.drain(..).collect::<Vec<_>>();
+        let results = top_level_items
+            .drain(..)
+            .map(|visitor_item| self.convert_parser_item_into_doc_item(visitor_item))
+            .collect();
+        Ok(results)
+    }
+}
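For reference, the new parser runs in two explicit steps; below is a minimal sketch of a call site (the Luau snippet and the import path are made up for illustration, the types and methods are the ones added above):

    use anyhow::Result;

    // Hypothetical path; assumes the caller lives next to the definitions module.
    use super::definitions::DefinitionsParser;

    fn collect_items() -> Result<()> {
        // Made-up Luau definitions snippet. The pre-pass in `parse` rewrites the
        // `declare print: ...` line into `export type print = ...` so that
        // full_moon can parse it, and remembers "print" as a declared name.
        let contents = "--- Prints a message to stdout.\ndeclare print: (message: string) -> ()\n";

        let mut parser = DefinitionsParser::new();
        parser.parse(contents)?;
        // One DefinitionsItem per top-level declare / exported type; the "print"
        // item is marked as exported since its name was found as a declare.
        let items = parser.into_definition_items()?;
        assert!(!items.is_empty());
        Ok(())
    }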

tree.rs

@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
 
 use super::{
     builder::DefinitionsItemBuilder, item::DefinitionsItem, kind::DefinitionsItemKind,
-    parser::parse_type_definitions_into_doc_items,
+    parser::DefinitionsParser,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
@@ -14,12 +14,17 @@ pub struct DefinitionsTree(DefinitionsItem);
 #[allow(dead_code)]
 impl DefinitionsTree {
     pub fn from_type_definitions<S: AsRef<str>>(type_definitions_contents: S) -> Result<Self> {
-        let top_level_items = parse_type_definitions_into_doc_items(type_definitions_contents)
-            .context("Failed to visit type definitions AST")?;
+        let mut parser = DefinitionsParser::new();
+        parser
+            .parse(type_definitions_contents)
+            .context("Failed to parse type definitions AST")?;
+        let top_level_definition_items = parser
+            .into_definition_items()
+            .context("Failed to convert parser items into definition items")?;
         let root = DefinitionsItemBuilder::new()
            .with_kind(DefinitionsItemKind::Root)
            .with_name("<<<ROOT>>>")
-            .with_children(&top_level_items)
+            .with_children(&top_level_definition_items)
            .build()?;
         Ok(Self(root))
     }
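Downstream, DefinitionsTree::from_type_definitions stays the public entry point; a small sketch of how a caller is expected to use it (reading the definitions file from disk is assumed to happen elsewhere, and the import path is hypothetical):

    use anyhow::Result;

    // Hypothetical path; assumes the caller lives next to the definitions module.
    use super::definitions::DefinitionsTree;

    fn build_tree(contents: &str) -> Result<DefinitionsTree> {
        // Internally this now runs DefinitionsParser::parse followed by
        // into_definition_items and hangs every item under the "<<<ROOT>>>" item.
        DefinitionsTree::from_type_definitions(contents)
    }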

type_info_ext.rs (new file)

@@ -0,0 +1,206 @@
use full_moon::ast::types::{TypeArgument, TypeInfo};

use super::kind::DefinitionsItemKind;

pub const PIPE_SEPARATOR: &str = " | ";

pub(super) trait TypeInfoExt {
    fn is_fn(&self) -> bool;
    fn to_definitions_kind(&self) -> DefinitionsItemKind;
    fn stringify_simple(&self, parent_typ: Option<&TypeInfo>) -> String;
    fn extract_args<'a>(&'a self, base: Vec<Vec<&'a TypeArgument>>) -> Vec<Vec<&'a TypeArgument>>;
    fn extract_args_normalized(&self) -> Option<Vec<String>>;
}

impl TypeInfoExt for TypeInfo {
    fn is_fn(&self) -> bool {
        match self {
            TypeInfo::Callback { .. } => true,
            TypeInfo::Tuple { types, .. } => types.iter().all(Self::is_fn),
            TypeInfo::Union { left, right, .. } | TypeInfo::Intersection { left, right, .. } => {
                left.is_fn() || right.is_fn()
            }
            _ => false,
        }
    }

    fn to_definitions_kind(&self) -> DefinitionsItemKind {
        match self {
            TypeInfo::Array { .. } | TypeInfo::Table { .. } => DefinitionsItemKind::Table,
            TypeInfo::Basic(_) | TypeInfo::String(_) => DefinitionsItemKind::Property,
            TypeInfo::Optional { base, .. } => Self::to_definitions_kind(base.as_ref()),
            TypeInfo::Tuple { types, .. } => {
                let mut kinds = types
                    .iter()
                    .map(Self::to_definitions_kind)
                    .collect::<Vec<_>>();
                let kinds_all_the_same = kinds.windows(2).all(|w| w[0] == w[1]);
                if kinds_all_the_same && !kinds.is_empty() {
                    kinds.pop().unwrap()
                } else {
                    unimplemented!(
                        "Missing support for tuple with differing types in type definitions parser",
                    )
                }
            }
            TypeInfo::Union { left, right, .. } | TypeInfo::Intersection { left, right, .. } => {
                let kind_left = Self::to_definitions_kind(left.as_ref());
                let kind_right = Self::to_definitions_kind(right.as_ref());
                if kind_left == kind_right {
                    kind_left
                } else {
                    unimplemented!(
                        "Missing support for union/intersection with differing types in type definitions parser",
                    )
                }
            }
            typ if typ.is_fn() => DefinitionsItemKind::Function,
            typ => unimplemented!(
                "Missing support for TypeInfo in type definitions parser:\n{}",
                typ.to_string()
            ),
        }
    }

    fn stringify_simple(&self, parent_typ: Option<&TypeInfo>) -> String {
        match self {
            TypeInfo::Array { type_info, .. } => {
                format!("{{ {} }}", type_info.as_ref().stringify_simple(Some(self)))
            }
            TypeInfo::Basic(tok) => match parent_typ {
                Some(TypeInfo::Callback { generics, .. }) => {
                    if let Some(generics) = generics {
                        // If the function that contains this arg has generic and a
                        // generic is the same as this token, we stringify it as any
                        if generics
                            .generics()
                            .iter()
                            .any(|g| g.to_string() == tok.token().to_string())
                        {
                            "any".to_string()
                        } else {
                            tok.token().to_string()
                        }
                    } else {
                        tok.token().to_string()
                    }
                }
                _ => tok.token().to_string(),
            },
            TypeInfo::String(str) => str.token().to_string(),
            TypeInfo::Boolean(_) => "boolean".to_string(),
            TypeInfo::Callback { .. } => "function".to_string(),
            TypeInfo::Optional { base, .. } => {
                format!("{}?", base.as_ref().stringify_simple(Some(self)))
            }
            TypeInfo::Table { .. } => "table".to_string(),
            TypeInfo::Union { left, right, .. } => {
                format!(
                    "{}{PIPE_SEPARATOR}{}",
                    left.as_ref().stringify_simple(Some(self)),
                    right.as_ref().stringify_simple(Some(self))
                )
            }
            // TODO: Stringify custom table types properly, these show up as basic tokens
            // and we should be able to look up the real type using found top level items
            _ => "...".to_string(),
        }
    }

    fn extract_args<'a>(&'a self, base: Vec<Vec<&'a TypeArgument>>) -> Vec<Vec<&'a TypeArgument>> {
        match self {
            TypeInfo::Callback { arguments, .. } => {
                let mut result = base.clone();
                result.push(arguments.iter().collect::<Vec<_>>());
                result
            }
            TypeInfo::Tuple { types, .. } => types
                .iter()
                .next()
                .expect("Function tuple type was empty")
                .extract_args(base.clone()),
            TypeInfo::Union { left, right, .. } | TypeInfo::Intersection { left, right, .. } => {
                let mut result = base.clone();
                result = left.extract_args(result.clone());
                result = right.extract_args(result.clone());
                result
            }
            _ => base,
        }
    }

    fn extract_args_normalized(&self) -> Option<Vec<String>> {
        if self.is_fn() {
            let mut type_args_multi = self.extract_args(Vec::new());
            match type_args_multi.len() {
                0 => None,
                1 => Some(
                    // We got a normal function with some known list of args, and we will
                    // stringify the arg types into simple ones such as "function", "table", ..
                    type_args_multi
                        .pop()
                        .unwrap()
                        .iter()
                        .map(|type_arg| type_arg.type_info().stringify_simple(Some(self)))
                        .collect(),
                ),
                _ => {
                    // We got a union or intersection function, meaning it has
                    // several different overloads that accept different args
                    let mut unified_args = Vec::new();
                    for index in 0..type_args_multi
                        .iter()
                        .fold(0, |acc, type_args| acc.max(type_args.len()))
                    {
                        // Gather function arg type strings for all
                        // of the different variants of this function
                        let mut type_arg_strings = type_args_multi
                            .iter()
                            .filter_map(|type_args| type_args.get(index))
                            .map(|type_arg| type_arg.type_info().stringify_simple(Some(self)))
                            .collect::<Vec<_>>();
                        if type_arg_strings.len() < type_args_multi.len() {
                            for _ in type_arg_strings.len()..type_args_multi.len() {
                                type_arg_strings.push("nil".to_string());
                            }
                        }
                        // Type arg strings may themselves be stringified to something like number | string so we
                        // will split that out to be able to handle it better with the following unification process
                        let mut type_arg_strings_sep = Vec::new();
                        for type_arg_string in type_arg_strings.drain(..) {
                            for typ_arg_string_inner in type_arg_string.split(PIPE_SEPARATOR) {
                                type_arg_strings_sep.push(typ_arg_string_inner.to_string());
                            }
                        }
                        // Find out if we have any nillable type, to know if we
                        // should make the entire arg type union nillable or not
                        let has_any_optional = type_arg_strings_sep
                            .iter()
                            .any(|s| s == "nil" || s.ends_with('?'));
                        // Filter out any nils or optional markers (?),
                        // we will add this back at the end if necessary
                        let mut type_arg_strings_non_nil = type_arg_strings_sep
                            .iter()
                            .filter(|s| *s != "nil")
                            .map(|s| s.trim_end_matches('?').to_string())
                            .collect::<Vec<_>>();
                        type_arg_strings_non_nil.sort(); // Need to sort for dedup
                        type_arg_strings_non_nil.dedup(); // Dedup to get rid of redundant types such as string | string
                        unified_args.push(if has_any_optional {
                            if type_arg_strings_non_nil.len() == 1 {
                                format!("{}?", type_arg_strings_non_nil.pop().unwrap())
                            } else {
                                format!("({})?", type_arg_strings_non_nil.join(PIPE_SEPARATOR))
                            }
                        } else {
                            type_arg_strings_non_nil.join(PIPE_SEPARATOR)
                        });
                    }
                    Some(unified_args)
                }
            }
        } else {
            None
        }
    }
}
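
As a worked example of the unification done in extract_args_normalized: the helper below and the overloaded callback type in its comment are made up, but the expected strings follow directly from the code above.

    use full_moon::ast::types::TypeInfo;

    // The trait is pub(super), so this helper only compiles inside the definitions module.
    use super::type_info_ext::TypeInfoExt;

    // Given a TypeInfo parsed from the (made-up) overloaded callback
    //     ((number) -> string) | ((string?) -> string)
    // the two overloads contribute "number" and "string?" for the first argument,
    // the "?" markers are trimmed, duplicates are deduped, and because one of the
    // variants was nillable the whole union is wrapped back up as optional,
    // yielding Some(vec!["(number | string)?"]).
    fn normalized_overload_args(type_info: &TypeInfo) -> Option<Vec<String>> {
        type_info.extract_args_normalized()
    }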