// luau/Analysis/src/TypeAttach.cpp

// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/TypeAttach.h"
#include "Luau/Error.h"
#include "Luau/Module.h"
#include "Luau/RecursionCounter.h"
#include "Luau/Scope.h"
#include "Luau/ToString.h"
#include "Luau/TypeInfer.h"
#include "Luau/TypePack.h"
#include "Luau/Type.h"
#include "Luau/TypeFamily.h"
#include <map>
#include <string>
#include <unordered_map>

#include <stdio.h>  // snprintf
#include <string.h> // memcpy, strcpy, strlen
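// Copies the string contents into allocator-owned memory and NUL-terminates it.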
static char* allocateString(Luau::Allocator& allocator, std::string_view contents)
{
char* result = (char*)allocator.allocate(contents.size() + 1);
memcpy(result, contents.data(), contents.size());
result[contents.size()] = '\0';
return result;
}
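// printf-style overload: measures the formatted length with snprintf, then
// formats into allocator-owned storage.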
template<typename... Data>
static char* allocateString(Luau::Allocator& allocator, const char* format, Data... data)
{
int len = snprintf(nullptr, 0, format, data...);
char* result = (char*)allocator.allocate(len + 1);
snprintf(result, len + 1, format, data...);
return result;
}
using SyntheticNames = std::unordered_map<const void*, char*>;
namespace Luau
{
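// Returns an allocator-owned name for a generic type, generating and caching a
// synthetic one when no explicit name exists.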
static const char* getName(Allocator* allocator, SyntheticNames* syntheticNames, const GenericType& gen)
{
size_t s = syntheticNames->size();
char*& n = (*syntheticNames)[&gen];
if (!n)
{
std::string str = gen.explicitName ? gen.name : generateName(s);
n = static_cast<char*>(allocator->allocate(str.size() + 1));
strcpy(n, str.c_str());
}
return n;
}
static const char* getName(Allocator* allocator, SyntheticNames* syntheticNames, const GenericTypePack& gen)
{
size_t s = syntheticNames->size();
char*& n = (*syntheticNames)[&gen];
if (!n)
{
std::string str = gen.explicitName ? gen.name : generateName(s);
n = static_cast<char*>(allocator->allocate(str.size() + 1));
strcpy(n, str.c_str());
}
return n;
}
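// Converts internal Type representations back into AST type annotations
// ("rehydration"), so inferred types can be attached to the parse tree.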
class TypeRehydrationVisitor
{
std::map<void*, int> seen;
int count = 0;
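// A type counts as already seen (i.e. a cycle) only when it was recorded at a
// shallower recursion depth than the current visit.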
bool hasSeen(const void* tv)
{
void* ttv = const_cast<void*>(tv);
auto it = seen.find(ttv);
if (it != seen.end() && it->second < count)
return true;
seen[ttv] = count;
return false;
}
public:
TypeRehydrationVisitor(Allocator* alloc, SyntheticNames* syntheticNames, const TypeRehydrationOptions& options = TypeRehydrationOptions())
: allocator(alloc)
, syntheticNames(syntheticNames)
, options(options)
{
}
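// Bridges to TypePackRehydrationVisitor (defined below) for type packs.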
AstTypePack* rehydrate(TypePackId tp);
AstType* operator()(const PrimitiveType& ptv)
{
switch (ptv.type)
{
case PrimitiveType::NilType:
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("nil"), std::nullopt, Location());
case PrimitiveType::Boolean:
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("boolean"), std::nullopt, Location());
case PrimitiveType::Number:
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("number"), std::nullopt, Location());
case PrimitiveType::String:
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("string"), std::nullopt, Location());
case PrimitiveType::Thread:
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("thread"), std::nullopt, Location());
Sync to upstream/release/603 (#1097) # What's changed? - Record the location of properties for table types (closes #802) - Implement stricter UTF-8 validations as per the RFC (https://github.com/luau-lang/rfcs/pull/1) - Implement `buffer` as a new type in both the old and new solvers. - Changed errors produced by some `buffer` builtins to be a bit more generic to avoid platform-dependent error messages. - Fixed a bug where `Unifier` would copy some persistent types, tripping some internal assertions. - Type checking rules on relational operators is now a little bit more lax. - Improve dead code elimination for some `if` statements with complex always-false conditions ## New type solver - Dataflow analysis now generates phi nodes on exit of branches. - Dataflow analysis avoids producing a new definition for locals or properties that are not owned by that loop. - If a function parameter has been constrained to `never`, report errors at all uses of that parameter within that function. - Switch to using the new `Luau::Set` to replace `std::unordered_set` to alleviate some poor allocation characteristics which was negatively affecting overall performance. - Subtyping can now report many failing reasons instead of just the first one that we happened to find during the test. - Subtyping now also report reasons for type pack mismatches. - When visiting `if` statements or expressions, the resulting context are the common terms in both branches. ## Native codegen - Implement support for `buffer` builtins to its IR for x64 and A64. - Optimized `table.insert` by not inserting a table barrier if it is fastcalled with a constant. ## Internal Contributors Co-authored-by: Aaron Weiss <aaronweiss@roblox.com> Co-authored-by: Alexander McCord <amccord@roblox.com> Co-authored-by: Andy Friesen <afriesen@roblox.com> Co-authored-by: Arseny Kapoulkine <arseny@roblox.com> Co-authored-by: Aviral Goel <agoel@roblox.com> Co-authored-by: Lily Brown <lbrown@roblox.com> Co-authored-by: Vyacheslav Egorov <vegorov@roblox.com>
2023-11-10 21:10:07 +00:00
case PrimitiveType::Buffer:
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("buffer"), std::nullopt, Location());
default:
return nullptr;
}
}
AstType* operator()(const BlockedType& btv)
2022-06-17 02:05:14 +01:00
{
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("*blocked*"), std::nullopt, Location());
2022-06-17 02:05:14 +01:00
}
AstType* operator()(const PendingExpansionType& petv)
2022-08-04 23:35:33 +01:00
{
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("*pending-expansion*"), std::nullopt, Location());
2022-08-04 23:35:33 +01:00
}
AstType* operator()(const SingletonType& stv)
{
if (const BooleanSingleton* bs = get<BooleanSingleton>(&stv))
return allocator->alloc<AstTypeSingletonBool>(Location(), bs->value);
else if (const StringSingleton* ss = get<StringSingleton>(&stv))
{
AstArray<char> value;
value.data = const_cast<char*>(ss->value.c_str());
value.size = strlen(value.data);
return allocator->alloc<AstTypeSingletonString>(Location(), value);
}
else
return nullptr;
}
AstType* operator()(const AnyType&)
{
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("any"), std::nullopt, Location());
}
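// Named tables become type references (with any type/pack parameters);
// anonymous tables are expanded property by property.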
AstType* operator()(const TableType& ttv)
{
RecursionCounter counter(&count);
if (ttv.name && options.bannedNames.find(*ttv.name) == options.bannedNames.end())
{
AstArray<AstTypeOrPack> parameters;
parameters.size = ttv.instantiatedTypeParams.size() + ttv.instantiatedTypePackParams.size();
parameters.data = static_cast<AstTypeOrPack*>(allocator->allocate(sizeof(AstTypeOrPack) * parameters.size));
for (size_t i = 0; i < ttv.instantiatedTypeParams.size(); ++i)
{
parameters.data[i] = {Luau::visit(*this, ttv.instantiatedTypeParams[i]->ty), {}};
}
// Pack parameters are appended after the type parameters.
for (size_t i = 0; i < ttv.instantiatedTypePackParams.size(); ++i)
{
parameters.data[ttv.instantiatedTypeParams.size() + i] = {{}, rehydrate(ttv.instantiatedTypePackParams[i])};
}
return allocator->alloc<AstTypeReference>(
Location(), std::nullopt, AstName(ttv.name->c_str()), std::nullopt, Location(), parameters.size != 0, parameters);
}
if (hasSeen(&ttv))
{
if (ttv.name)
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName(ttv.name->c_str()), std::nullopt, Location());
else
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("<Cycle>"), std::nullopt, Location());
}
AstArray<AstTableProp> props;
props.size = ttv.props.size();
props.data = static_cast<AstTableProp*>(allocator->allocate(sizeof(AstTableProp) * props.size));
int idx = 0;
for (const auto& [propName, prop] : ttv.props)
{
RecursionCounter counter(&count);
char* name = allocateString(*allocator, propName);
props.data[idx].name = AstName(name);
props.data[idx].type = Luau::visit(*this, prop.type()->ty);
props.data[idx].location = Location();
idx++;
}
AstTableIndexer* indexer = nullptr;
if (ttv.indexer)
{
RecursionCounter counter(&count);
indexer = allocator->alloc<AstTableIndexer>();
indexer->indexType = Luau::visit(*this, ttv.indexer->indexType->ty);
indexer->resultType = Luau::visit(*this, ttv.indexer->indexResultType->ty);
}
return allocator->alloc<AstTypeTable>(Location(), props, indexer);
}
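// A metatable type rehydrates as its table component; annotation syntax cannot
// express the metatable itself.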
AstType* operator()(const MetatableType& mtv)
{
return Luau::visit(*this, mtv.table->ty);
}
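// Classes normally rehydrate to a bare name reference; expandClassProps
// additionally expands top-level uses into a table of their properties.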
AstType* operator()(const ClassType& ctv)
{
RecursionCounter counter(&count);
char* name = allocateString(*allocator, ctv.name);
if (!options.expandClassProps || hasSeen(&ctv) || count > 1)
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName{name}, std::nullopt, Location());
AstArray<AstTableProp> props;
props.size = ctv.props.size();
props.data = static_cast<AstTableProp*>(allocator->allocate(sizeof(AstTableProp) * props.size));
int idx = 0;
for (const auto& [propName, prop] : ctv.props)
{
char* name = allocateString(*allocator, propName);
props.data[idx].name = AstName{name};
props.data[idx].type = Luau::visit(*this, prop.type()->ty);
props.data[idx].location = Location();
idx++;
}
AstTableIndexer* indexer = nullptr;
if (ctv.indexer)
{
RecursionCounter counter(&count);
indexer = allocator->alloc<AstTableIndexer>();
indexer->indexType = Luau::visit(*this, ctv.indexer->indexType->ty);
indexer->resultType = Luau::visit(*this, ctv.indexer->indexResultType->ty);
}
return allocator->alloc<AstTypeTable>(Location(), props, indexer);
}
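// Rebuilds a full function annotation: generic lists, argument types/names
// with an optional variadic tail, and return types.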
AstType* operator()(const FunctionType& ftv)
{
RecursionCounter counter(&count);
if (hasSeen(&ftv))
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("<Cycle>"), std::nullopt, Location());
AstArray<AstGenericType> generics;
generics.size = ftv.generics.size();
generics.data = static_cast<AstGenericType*>(allocator->allocate(sizeof(AstGenericType) * generics.size));
size_t numGenerics = 0;
for (auto it = ftv.generics.begin(); it != ftv.generics.end(); ++it)
{
if (auto gtv = get<GenericType>(*it))
generics.data[numGenerics++] = {AstName(gtv->name.c_str()), Location(), nullptr};
}
generics.size = numGenerics; // only count the entries actually written
AstArray<AstGenericTypePack> genericPacks;
genericPacks.size = ftv.genericPacks.size();
genericPacks.data = static_cast<AstGenericTypePack*>(allocator->allocate(sizeof(AstGenericTypePack) * genericPacks.size));
size_t numGenericPacks = 0;
for (auto it = ftv.genericPacks.begin(); it != ftv.genericPacks.end(); ++it)
{
if (auto gtv = get<GenericTypePack>(*it))
genericPacks.data[numGenericPacks++] = {AstName(gtv->name.c_str()), Location(), nullptr};
}
genericPacks.size = numGenericPacks; // only count the entries actually written
AstArray<AstType*> argTypes;
const auto& [argVector, argTail] = flatten(ftv.argTypes);
argTypes.size = argVector.size();
argTypes.data = static_cast<AstType**>(allocator->allocate(sizeof(AstType*) * argTypes.size));
for (size_t i = 0; i < argTypes.size; ++i)
{
RecursionCounter counter(&count);
argTypes.data[i] = Luau::visit(*this, (argVector[i])->ty);
}
AstTypePack* argTailAnnotation = nullptr;
if (argTail)
argTailAnnotation = rehydrate(*argTail);
AstArray<std::optional<AstArgumentName>> argNames;
argNames.size = ftv.argNames.size();
argNames.data = static_cast<std::optional<AstArgumentName>*>(allocator->allocate(sizeof(std::optional<AstArgumentName>) * argNames.size));
size_t i = 0;
for (const auto& el : ftv.argNames)
{
std::optional<AstArgumentName>* arg = &argNames.data[i++];
if (el)
new (arg) std::optional<AstArgumentName>(AstArgumentName(AstName(el->name.c_str()), el->location));
else
new (arg) std::optional<AstArgumentName>();
}
AstArray<AstType*> returnTypes;
const auto& [retVector, retTail] = flatten(ftv.retTypes);
returnTypes.size = retVector.size();
returnTypes.data = static_cast<AstType**>(allocator->allocate(sizeof(AstType*) * returnTypes.size));
for (size_t i = 0; i < returnTypes.size; ++i)
{
RecursionCounter counter(&count);
returnTypes.data[i] = Luau::visit(*this, (retVector[i])->ty);
}
AstTypePack* retTailAnnotation = nullptr;
if (retTail)
retTailAnnotation = rehydrate(*retTail);
return allocator->alloc<AstTypeFunction>(
Location(), generics, genericPacks, AstTypeList{argTypes, argTailAnnotation}, argNames, AstTypeList{returnTypes, retTailAnnotation});
}
AstType* operator()(const Unifiable::Error&)
{
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("Unifiable<Error>"), std::nullopt, Location());
}
AstType* operator()(const GenericType& gtv)
{
return allocator->alloc<AstTypeReference>(
Location(), std::nullopt, AstName(getName(allocator, syntheticNames, gtv)), std::nullopt, Location());
}
AstType* operator()(const Unifiable::Bound<TypeId>& bound)
{
return Luau::visit(*this, bound.boundTo->ty);
}
AstType* operator()(const FreeType& ftv)
{
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("free"), std::nullopt, Location());
}
AstType* operator()(const UnionType& uv)
{
AstArray<AstType*> unionTypes;
unionTypes.size = uv.options.size();
unionTypes.data = static_cast<AstType**>(allocator->allocate(sizeof(AstType*) * unionTypes.size));
for (size_t i = 0; i < unionTypes.size; ++i)
{
unionTypes.data[i] = Luau::visit(*this, uv.options[i]->ty);
}
return allocator->alloc<AstTypeUnion>(Location(), unionTypes);
}
AstType* operator()(const IntersectionType& uv)
{
AstArray<AstType*> intersectionTypes;
intersectionTypes.size = uv.parts.size();
intersectionTypes.data = static_cast<AstType**>(allocator->allocate(sizeof(AstType*) * intersectionTypes.size));
for (size_t i = 0; i < intersectionTypes.size; ++i)
{
intersectionTypes.data[i] = Luau::visit(*this, uv.parts[i]->ty);
}
return allocator->alloc<AstTypeIntersection>(Location(), intersectionTypes);
}
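// A lazy type is only expanded if it has already been forced; otherwise a
// placeholder reference is produced.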
AstType* operator()(const LazyType& ltv)
{
if (TypeId unwrapped = ltv.unwrapped.load())
return Luau::visit(*this, unwrapped->ty);
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("<Lazy?>"), std::nullopt, Location());
}
AstType* operator()(const UnknownType& ttv)
{
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName{"unknown"}, std::nullopt, Location());
}
AstType* operator()(const NeverType& ttv)
{
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName{"never"}, std::nullopt, Location());
}
AstType* operator()(const NegationType& ntv)
{
// FIXME: do the same thing we do with ErrorType
throw InternalCompilerError("Cannot convert NegationType into AstNode");
}
AstType* operator()(const TypeFamilyInstanceType& tfit)
{
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName{tfit.family->name.c_str()}, std::nullopt, Location());
}
private:
Allocator* allocator;
SyntheticNames* syntheticNames;
const TypeRehydrationOptions& options;
};
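// Companion visitor for type packs; element types are routed back through
// TypeRehydrationVisitor.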
class TypePackRehydrationVisitor
{
public:
TypePackRehydrationVisitor(Allocator* allocator, SyntheticNames* syntheticNames, TypeRehydrationVisitor* typeVisitor)
: allocator(allocator)
, syntheticNames(syntheticNames)
, typeVisitor(typeVisitor)
{
LUAU_ASSERT(allocator);
LUAU_ASSERT(syntheticNames);
LUAU_ASSERT(typeVisitor);
}
AstTypePack* operator()(const BoundTypePack& btp) const
{
return Luau::visit(*this, btp.boundTo->ty);
}
AstTypePack* operator()(const BlockedTypePack& btp) const
{
return allocator->alloc<AstTypePackGeneric>(Location(), AstName("*blocked*"));
}
AstTypePack* operator()(const TypePack& tp) const
{
AstArray<AstType*> head;
head.size = tp.head.size();
head.data = static_cast<AstType**>(allocator->allocate(sizeof(AstType*) * tp.head.size()));
for (size_t i = 0; i < tp.head.size(); i++)
head.data[i] = Luau::visit(*typeVisitor, tp.head[i]->ty);
AstTypePack* tail = nullptr;
if (tp.tail)
tail = Luau::visit(*this, (*tp.tail)->ty);
return allocator->alloc<AstTypePackExplicit>(Location(), AstTypeList{head, tail});
}
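// Hidden variadic tails are dropped rather than rendered in annotations.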
AstTypePack* operator()(const VariadicTypePack& vtp) const
{
if (vtp.hidden)
return nullptr;
return allocator->alloc<AstTypePackVariadic>(Location(), Luau::visit(*typeVisitor, vtp.ty->ty));
}
AstTypePack* operator()(const GenericTypePack& gtp) const
{
return allocator->alloc<AstTypePackGeneric>(Location(), AstName(getName(allocator, syntheticNames, gtp)));
}
AstTypePack* operator()(const FreeTypePack& ftp) const
{
return allocator->alloc<AstTypePackGeneric>(Location(), AstName("free"));
}
AstTypePack* operator()(const Unifiable::Error&) const
{
return allocator->alloc<AstTypePackGeneric>(Location(), AstName("Unifiable<Error>"));
}
AstTypePack* operator()(const TypeFamilyInstanceTypePack& tfitp) const
{
return allocator->alloc<AstTypePackGeneric>(Location(), AstName(tfitp.family->name.c_str()));
}
private:
Allocator* allocator;
SyntheticNames* syntheticNames;
TypeRehydrationVisitor* typeVisitor;
};
AstTypePack* TypeRehydrationVisitor::rehydrate(TypePackId tp)
{
TypePackRehydrationVisitor tprv(allocator, syntheticNames, this);
return Luau::visit(tprv, tp->ty);
}
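// Walks a checked module's AST and fills in annotations (from inferred types)
// wherever the source omitted them.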
class TypeAttacher : public AstVisitor
{
public:
TypeAttacher(Module& checker, Luau::Allocator* alloc)
: module(checker)
, allocator(alloc)
{
}
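// Returns the innermost scope whose location encloses loc, or nullptr.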
ScopePtr getScope(const Location& loc)
{
Location scopeLocation;
ScopePtr scope = nullptr;
for (const auto& s : module.scopes)
{
if (s.first.encloses(loc))
{
if (!scope || scopeLocation.encloses(s.first))
{
scopeLocation = s.first;
scope = s.second;
}
}
}
return scope;
}
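// Rehydrates an inferred type into an annotation; returns nullptr when no type
// is available.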
AstType* typeAst(std::optional<TypeId> type)
{
if (!type)
return nullptr;
return Luau::visit(TypeRehydrationVisitor(allocator, &syntheticNames), (*type)->ty);
}
AstArray<Luau::AstType*> typeAstPack(TypePackId type)
{
const auto& [v, tail] = flatten(type);
AstArray<AstType*> result;
result.size = v.size();
result.data = static_cast<AstType**>(allocator->allocate(sizeof(AstType*) * v.size()));
for (size_t i = 0; i < v.size(); ++i)
{
result.data[i] = Luau::visit(TypeRehydrationVisitor(allocator, &syntheticNames), v[i]->ty);
}
return result;
}
virtual bool visit(AstStatLocal* al) override
{
for (size_t i = 0; i < al->vars.size; ++i)
{
visitLocal(al->vars.data[i]);
}
return true;
}
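// Attaches an inferred annotation to an unannotated local by looking it up in
// its enclosing scope.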
virtual bool visitLocal(AstLocal* local)
{
AstType* annotation = local->annotation;
if (!annotation)
{
if (auto scope = getScope(local->location))
{
if (auto result = scope->lookup(local))
local->annotation = typeAst(*result);
}
}
return true;
}
virtual bool visit(AstExprLocal* al) override
{
return visitLocal(al->local);
}
virtual bool visit(AstStatFor* stat) override
{
visitLocal(stat->var);
return true;
}
virtual bool visit(AstStatForIn* stat) override
{
for (size_t i = 0; i < stat->vars.size; ++i)
visitLocal(stat->vars.data[i]);
return true;
}
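// Annotates function parameters and synthesizes a return-type list (with a
// variadic tail, if any) when none was written.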
virtual bool visit(AstExprFunction* fn) override
{
// TODO: add generics if the inferred type of the function is generic CLI-39908
for (size_t i = 0; i < fn->args.size; ++i)
{
AstLocal* arg = fn->args.data[i];
visitLocal(arg);
}
if (!fn->returnAnnotation)
{
if (auto result = getScope(fn->body->location))
{
TypePackId ret = result->returnType;
AstTypePack* variadicAnnotation = nullptr;
const auto& [v, tail] = flatten(ret);
if (tail)
variadicAnnotation = TypeRehydrationVisitor(allocator, &syntheticNames).rehydrate(*tail);
fn->returnAnnotation = AstTypeList{typeAstPack(ret), variadicAnnotation};
}
}
return true;
}
private:
Module& module;
Allocator* allocator;
SyntheticNames syntheticNames;
};
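// Typical usage (a sketch; `module` here stands for the ModulePtr produced by
// type checking the given SourceModule):
//   attachTypeData(sourceModule, *module);
// This mutates the AST in place so downstream tools can read the inferred
// annotations.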
void attachTypeData(SourceModule& source, Module& result)
{
TypeAttacher ta(result, source.allocator.get());
source.root->visit(&ta);
}
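// One-shot helper: rehydrates a single TypeId into an annotation using a fresh
// set of synthetic names.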
AstType* rehydrateAnnotation(TypeId type, Allocator* allocator, const TypeRehydrationOptions& options)
{
SyntheticNames syntheticNames;
return Luau::visit(TypeRehydrationVisitor(allocator, &syntheticNames, options), type->ty);
}
} // namespace Luau