mirror of
https://github.com/luau-lang/luau.git
synced 2024-12-12 21:10:37 +00:00
Sync to upstream/release/560 (#810)
* For autocomplete, additional information is included in Scope for type alias name locations and names of imported modules * Improved autocomplete suggestions in 'for' and 'while' loop headers * String match functions return types are now optional strings and numbers because match is not guaranteed at runtime * Fixed build issue on gcc 11 and up (Fixes https://github.com/Roblox/luau/issues/806)
This commit is contained in:
parent
729bc44729
commit
4a2e8013c7
68 changed files with 3107 additions and 1260 deletions
|
@ -64,7 +64,7 @@ private:
|
|||
};
|
||||
|
||||
std::vector<AstNode*> findAncestryAtPositionForAutocomplete(const SourceModule& source, Position pos);
|
||||
std::vector<AstNode*> findAstAncestryOfPosition(const SourceModule& source, Position pos);
|
||||
std::vector<AstNode*> findAstAncestryOfPosition(const SourceModule& source, Position pos, bool includeTypes = false);
|
||||
AstNode* findNodeAtPosition(const SourceModule& source, Position pos);
|
||||
AstExpr* findExprAtPosition(const SourceModule& source, Position pos);
|
||||
ScopePtr findScopeAtPosition(const Module& module, Position pos);
|
||||
|
|
|
@ -240,20 +240,28 @@ struct ConstraintGraphBuilder
|
|||
* Resolves a type from its AST annotation.
|
||||
* @param scope the scope that the type annotation appears within.
|
||||
* @param ty the AST annotation to resolve.
|
||||
* @param topLevel whether the annotation is a "top-level" annotation.
|
||||
* @param inTypeArguments whether we are resolving a type that's contained within type arguments, `<...>`.
|
||||
* @return the type of the AST annotation.
|
||||
**/
|
||||
TypeId resolveType(const ScopePtr& scope, AstType* ty, bool topLevel = false);
|
||||
TypeId resolveType(const ScopePtr& scope, AstType* ty, bool inTypeArguments);
|
||||
|
||||
/**
|
||||
* Resolves a type pack from its AST annotation.
|
||||
* @param scope the scope that the type annotation appears within.
|
||||
* @param tp the AST annotation to resolve.
|
||||
* @param inTypeArguments whether we are resolving a type that's contained within type arguments, `<...>`.
|
||||
* @return the type pack of the AST annotation.
|
||||
**/
|
||||
TypePackId resolveTypePack(const ScopePtr& scope, AstTypePack* tp);
|
||||
TypePackId resolveTypePack(const ScopePtr& scope, AstTypePack* tp, bool inTypeArguments);
|
||||
|
||||
TypePackId resolveTypePack(const ScopePtr& scope, const AstTypeList& list);
|
||||
/**
|
||||
* Resolves a type pack from its AST annotation.
|
||||
* @param scope the scope that the type annotation appears within.
|
||||
* @param list the AST annotation to resolve.
|
||||
* @param inTypeArguments whether we are resolving a type that's contained within type arguments, `<...>`.
|
||||
* @return the type pack of the AST annotation.
|
||||
**/
|
||||
TypePackId resolveTypePack(const ScopePtr& scope, const AstTypeList& list, bool inTypeArguments);
|
||||
|
||||
std::vector<std::pair<Name, GenericTypeDefinition>> createGenerics(const ScopePtr& scope, AstArray<AstGenericType> generics);
|
||||
std::vector<std::pair<Name, GenericTypePackDefinition>> createGenericPacks(const ScopePtr& scope, AstArray<AstGenericTypePack> packs);
|
||||
|
|
|
@ -111,7 +111,7 @@ struct ConstraintSolver
|
|||
bool tryDispatch(const FunctionCallConstraint& c, NotNull<const Constraint> constraint);
|
||||
bool tryDispatch(const PrimitiveTypeConstraint& c, NotNull<const Constraint> constraint);
|
||||
bool tryDispatch(const HasPropConstraint& c, NotNull<const Constraint> constraint);
|
||||
bool tryDispatch(const SetPropConstraint& c, NotNull<const Constraint> constraint);
|
||||
bool tryDispatch(const SetPropConstraint& c, NotNull<const Constraint> constraint, bool force);
|
||||
bool tryDispatch(const SingletonOrTopTypeConstraint& c, NotNull<const Constraint> constraint);
|
||||
|
||||
// for a, ... in some_table do
|
||||
|
|
|
@ -43,6 +43,8 @@ struct Scope
|
|||
std::unordered_map<Name, TypeFun> exportedTypeBindings;
|
||||
std::unordered_map<Name, TypeFun> privateTypeBindings;
|
||||
std::unordered_map<Name, Location> typeAliasLocations;
|
||||
std::unordered_map<Name, Location> typeAliasNameLocations;
|
||||
std::unordered_map<Name, ModuleName> importedModules; // Mapping from the name in the require statement to the internal moduleName.
|
||||
std::unordered_map<Name, std::unordered_map<Name, TypeFun>> importedTypeBindings;
|
||||
|
||||
DenseHashSet<Name> builtinTypeNames{""};
|
||||
|
|
|
@ -132,7 +132,9 @@ std::optional<std::string> getFunctionNameAsString(const AstExpr& expr);
|
|||
// It could be useful to see the text representation of a type during a debugging session instead of exploring the content of the class
|
||||
// These functions will dump the type to stdout and can be evaluated in Watch/Immediate windows or as gdb/lldb expression
|
||||
std::string dump(TypeId ty);
|
||||
std::string dump(const std::optional<TypeId>& ty);
|
||||
std::string dump(TypePackId ty);
|
||||
std::string dump(const std::optional<TypePackId>& ty);
|
||||
std::string dump(const Constraint& c);
|
||||
|
||||
std::string dump(const std::shared_ptr<Scope>& scope, const char* name);
|
||||
|
|
|
@ -657,8 +657,11 @@ public:
|
|||
const TypeId unknownType;
|
||||
const TypeId neverType;
|
||||
const TypeId errorType;
|
||||
const TypeId falsyType; // No type binding!
|
||||
const TypeId truthyType; // No type binding!
|
||||
const TypeId falsyType;
|
||||
const TypeId truthyType;
|
||||
|
||||
const TypeId optionalNumberType;
|
||||
const TypeId optionalStringType;
|
||||
|
||||
const TypePackId anyTypePack;
|
||||
const TypePackId neverTypePack;
|
||||
|
|
|
@ -9,11 +9,28 @@
|
|||
namespace Luau
|
||||
{
|
||||
|
||||
/// If it's desirable to allocate into a different arena than the TypeReduction instance you have, you will need
|
||||
/// to create a temporary TypeReduction in that case. This is because TypeReduction caches the reduced type.
|
||||
namespace detail
|
||||
{
|
||||
template<typename T>
|
||||
struct ReductionContext
|
||||
{
|
||||
T type = nullptr;
|
||||
bool irreducible = false;
|
||||
};
|
||||
} // namespace detail
|
||||
|
||||
struct TypeReductionOptions
|
||||
{
|
||||
/// If it's desirable for type reduction to allocate into a different arena than the TypeReduction instance you have, you will need
|
||||
/// to create a temporary TypeReduction in that case, and set [`TypeReductionOptions::allowTypeReductionsFromOtherArenas`] to true.
|
||||
/// This is because TypeReduction caches the reduced type.
|
||||
bool allowTypeReductionsFromOtherArenas = false;
|
||||
};
|
||||
|
||||
struct TypeReduction
|
||||
{
|
||||
explicit TypeReduction(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<InternalErrorReporter> handle);
|
||||
explicit TypeReduction(
|
||||
NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<InternalErrorReporter> handle, const TypeReductionOptions& opts = {});
|
||||
|
||||
std::optional<TypeId> reduce(TypeId ty);
|
||||
std::optional<TypePackId> reduce(TypePackId tp);
|
||||
|
@ -23,12 +40,10 @@ private:
|
|||
NotNull<TypeArena> arena;
|
||||
NotNull<BuiltinTypes> builtinTypes;
|
||||
NotNull<struct InternalErrorReporter> handle;
|
||||
TypeReductionOptions options;
|
||||
|
||||
DenseHashMap<TypeId, TypeId> cachedTypes{nullptr};
|
||||
DenseHashMap<TypePackId, TypePackId> cachedTypePacks{nullptr};
|
||||
|
||||
std::pair<std::optional<TypeId>, bool> reduceImpl(TypeId ty);
|
||||
std::pair<std::optional<TypePackId>, bool> reduceImpl(TypePackId tp);
|
||||
DenseHashMap<TypeId, detail::ReductionContext<TypeId>> memoizedTypes{nullptr};
|
||||
DenseHashMap<TypePackId, detail::ReductionContext<TypePackId>> memoizedTypePacks{nullptr};
|
||||
|
||||
// Computes an *estimated length* of the cartesian product of the given type.
|
||||
size_t cartesianProductSize(TypeId ty) const;
|
||||
|
|
|
@ -318,7 +318,10 @@ struct GenericTypeVisitor
|
|||
}
|
||||
}
|
||||
else if (auto ntv = get<NegationType>(ty))
|
||||
visit(ty, *ntv);
|
||||
{
|
||||
if (visit(ty, *ntv))
|
||||
traverse(ntv->ty);
|
||||
}
|
||||
else if (!FFlag::LuauCompleteVisitor)
|
||||
return visit_detail::unsee(seen, ty);
|
||||
else
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
#include <algorithm>
|
||||
|
||||
LUAU_FASTFLAG(LuauCompleteTableKeysBetter);
|
||||
LUAU_FASTFLAGVARIABLE(SupportTypeAliasGoToDeclaration, false);
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
|
@ -183,14 +184,31 @@ struct FindFullAncestry final : public AstVisitor
|
|||
std::vector<AstNode*> nodes;
|
||||
Position pos;
|
||||
Position documentEnd;
|
||||
bool includeTypes = false;
|
||||
|
||||
explicit FindFullAncestry(Position pos, Position documentEnd)
|
||||
explicit FindFullAncestry(Position pos, Position documentEnd, bool includeTypes = false)
|
||||
: pos(pos)
|
||||
, documentEnd(documentEnd)
|
||||
, includeTypes(includeTypes)
|
||||
{
|
||||
}
|
||||
|
||||
bool visit(AstNode* node)
|
||||
bool visit(AstType* type) override
|
||||
{
|
||||
if (FFlag::SupportTypeAliasGoToDeclaration)
|
||||
{
|
||||
if (includeTypes)
|
||||
return visit(static_cast<AstNode*>(type));
|
||||
else
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
return AstVisitor::visit(type);
|
||||
}
|
||||
}
|
||||
|
||||
bool visit(AstNode* node) override
|
||||
{
|
||||
if (node->location.contains(pos))
|
||||
{
|
||||
|
@ -220,13 +238,13 @@ std::vector<AstNode*> findAncestryAtPositionForAutocomplete(const SourceModule&
|
|||
return finder.ancestry;
|
||||
}
|
||||
|
||||
std::vector<AstNode*> findAstAncestryOfPosition(const SourceModule& source, Position pos)
|
||||
std::vector<AstNode*> findAstAncestryOfPosition(const SourceModule& source, Position pos, bool includeTypes)
|
||||
{
|
||||
const Position end = source.root->location.end;
|
||||
if (pos > end)
|
||||
pos = end;
|
||||
|
||||
FindFullAncestry finder(pos, end);
|
||||
FindFullAncestry finder(pos, end, includeTypes);
|
||||
source.root->visit(&finder);
|
||||
return finder.nodes;
|
||||
}
|
||||
|
|
|
@ -14,6 +14,9 @@
|
|||
|
||||
LUAU_FASTFLAGVARIABLE(LuauCompleteTableKeysBetter, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauFixAutocompleteInIf, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauFixAutocompleteInWhile, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauFixAutocompleteInFor, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauAutocompleteStringContent, false);
|
||||
|
||||
static const std::unordered_set<std::string> kStatementStartingKeywords = {
|
||||
"while", "if", "local", "repeat", "function", "do", "for", "return", "break", "continue", "type", "export"};
|
||||
|
@ -1265,6 +1268,9 @@ static bool isSimpleInterpolatedString(const AstNode* node)
|
|||
|
||||
static std::optional<std::string> getStringContents(const AstNode* node)
|
||||
{
|
||||
if (!FFlag::LuauAutocompleteStringContent)
|
||||
return std::nullopt;
|
||||
|
||||
if (const AstExprConstantString* string = node->as<AstExprConstantString>())
|
||||
{
|
||||
return std::string(string->value.data, string->value.size);
|
||||
|
@ -1314,8 +1320,7 @@ static std::optional<AutocompleteEntryMap> autocompleteStringParams(const Source
|
|||
|
||||
std::optional<std::string> candidateString = getStringContents(nodes.back());
|
||||
|
||||
auto performCallback = [&](const FunctionType* funcType) -> std::optional<AutocompleteEntryMap>
|
||||
{
|
||||
auto performCallback = [&](const FunctionType* funcType) -> std::optional<AutocompleteEntryMap> {
|
||||
for (const std::string& tag : funcType->tags)
|
||||
{
|
||||
if (std::optional<AutocompleteEntryMap> ret = callback(tag, getMethodContainingClass(module, candidate->func), candidateString))
|
||||
|
@ -1349,6 +1354,15 @@ static std::optional<AutocompleteEntryMap> autocompleteStringParams(const Source
|
|||
return std::nullopt;
|
||||
}
|
||||
|
||||
static AutocompleteResult autocompleteWhileLoopKeywords(std::vector<AstNode*> ancestry)
|
||||
{
|
||||
AutocompleteEntryMap ret;
|
||||
ret["do"] = {AutocompleteEntryKind::Keyword};
|
||||
ret["and"] = {AutocompleteEntryKind::Keyword};
|
||||
ret["or"] = {AutocompleteEntryKind::Keyword};
|
||||
return {std::move(ret), std::move(ancestry), AutocompleteContext::Keyword};
|
||||
}
|
||||
|
||||
static AutocompleteResult autocomplete(const SourceModule& sourceModule, const ModulePtr& module, NotNull<BuiltinTypes> builtinTypes,
|
||||
TypeArena* typeArena, Scope* globalScope, Position position, StringCompletionCallback callback)
|
||||
{
|
||||
|
@ -1406,6 +1420,17 @@ static AutocompleteResult autocomplete(const SourceModule& sourceModule, const M
|
|||
else if (AstStatFor* statFor = extractStat<AstStatFor>(ancestry))
|
||||
{
|
||||
if (!statFor->hasDo || position < statFor->doLocation.begin)
|
||||
{
|
||||
if (FFlag::LuauFixAutocompleteInFor)
|
||||
{
|
||||
if (statFor->from->location.containsClosed(position) || statFor->to->location.containsClosed(position) ||
|
||||
(statFor->step && statFor->step->location.containsClosed(position)))
|
||||
return autocompleteExpression(sourceModule, *module, builtinTypes, typeArena, ancestry, position);
|
||||
|
||||
if (!statFor->from->is<AstExprError>() && !statFor->to->is<AstExprError>() && (!statFor->step || !statFor->step->is<AstExprError>()))
|
||||
return {{{"do", AutocompleteEntry{AutocompleteEntryKind::Keyword}}}, ancestry, AutocompleteContext::Keyword};
|
||||
}
|
||||
else
|
||||
{
|
||||
if (!statFor->from->is<AstExprError>() && !statFor->to->is<AstExprError>() && (!statFor->step || !statFor->step->is<AstExprError>()))
|
||||
return {{{"do", AutocompleteEntry{AutocompleteEntryKind::Keyword}}}, ancestry, AutocompleteContext::Keyword};
|
||||
|
@ -1413,7 +1438,7 @@ static AutocompleteResult autocomplete(const SourceModule& sourceModule, const M
|
|||
if (statFor->from->location.containsClosed(position) || statFor->to->location.containsClosed(position) ||
|
||||
(statFor->step && statFor->step->location.containsClosed(position)))
|
||||
return autocompleteExpression(sourceModule, *module, builtinTypes, typeArena, ancestry, position);
|
||||
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
|
@ -1463,7 +1488,16 @@ static AutocompleteResult autocomplete(const SourceModule& sourceModule, const M
|
|||
else if (AstStatWhile* statWhile = parent->as<AstStatWhile>(); node->is<AstStatBlock>() && statWhile)
|
||||
{
|
||||
if (!statWhile->hasDo && !statWhile->condition->is<AstStatError>() && position > statWhile->condition->location.end)
|
||||
{
|
||||
if (FFlag::LuauFixAutocompleteInWhile)
|
||||
{
|
||||
return autocompleteWhileLoopKeywords(ancestry);
|
||||
}
|
||||
else
|
||||
{
|
||||
return {{{"do", AutocompleteEntry{AutocompleteEntryKind::Keyword}}}, ancestry, AutocompleteContext::Keyword};
|
||||
}
|
||||
}
|
||||
|
||||
if (!statWhile->hasDo || position < statWhile->doLocation.begin)
|
||||
return autocompleteExpression(sourceModule, *module, builtinTypes, typeArena, ancestry, position);
|
||||
|
@ -1472,9 +1506,20 @@ static AutocompleteResult autocomplete(const SourceModule& sourceModule, const M
|
|||
return {autocompleteStatement(sourceModule, *module, ancestry, position), ancestry, AutocompleteContext::Statement};
|
||||
}
|
||||
|
||||
else if (AstStatWhile* statWhile = extractStat<AstStatWhile>(ancestry); statWhile && !statWhile->hasDo)
|
||||
else if (AstStatWhile* statWhile = extractStat<AstStatWhile>(ancestry);
|
||||
FFlag::LuauFixAutocompleteInWhile ? (statWhile && (!statWhile->hasDo || statWhile->doLocation.containsClosed(position)) &&
|
||||
statWhile->condition && !statWhile->condition->location.containsClosed(position))
|
||||
: (statWhile && !statWhile->hasDo))
|
||||
{
|
||||
if (FFlag::LuauFixAutocompleteInWhile)
|
||||
{
|
||||
return autocompleteWhileLoopKeywords(ancestry);
|
||||
}
|
||||
else
|
||||
{
|
||||
return {{{"do", AutocompleteEntry{AutocompleteEntryKind::Keyword}}}, ancestry, AutocompleteContext::Keyword};
|
||||
|
||||
}
|
||||
}
|
||||
else if (AstStatIf* statIf = node->as<AstStatIf>(); statIf && !statIf->elseLocation.has_value())
|
||||
{
|
||||
return {{{"else", AutocompleteEntry{AutocompleteEntryKind::Keyword}}, {"elseif", AutocompleteEntry{AutocompleteEntryKind::Keyword}}},
|
||||
|
|
|
@ -15,7 +15,6 @@
|
|||
|
||||
#include <algorithm>
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauSetMetaTableArgsCheck, false)
|
||||
LUAU_FASTFLAG(LuauUnknownAndNeverType)
|
||||
LUAU_FASTFLAGVARIABLE(LuauBuiltInMetatableNoBadSynthetic, false)
|
||||
LUAU_FASTFLAG(LuauReportShadowedTypeAlias)
|
||||
|
@ -583,7 +582,7 @@ static std::optional<WithPredicate<TypePackId>> magicFunctionSetMetaTable(
|
|||
|
||||
TypeId mtTy = arena.addType(mtv);
|
||||
|
||||
if (FFlag::LuauSetMetaTableArgsCheck && expr.args.size < 1)
|
||||
if (expr.args.size < 1)
|
||||
{
|
||||
if (FFlag::LuauUnknownAndNeverType)
|
||||
return std::nullopt;
|
||||
|
@ -591,7 +590,7 @@ static std::optional<WithPredicate<TypePackId>> magicFunctionSetMetaTable(
|
|||
return WithPredicate<TypePackId>{};
|
||||
}
|
||||
|
||||
if (!FFlag::LuauSetMetaTableArgsCheck || !expr.self)
|
||||
if (!expr.self)
|
||||
{
|
||||
AstExpr* targetExpr = expr.args.data[0];
|
||||
if (AstExprLocal* targetLocal = targetExpr->as<AstExprLocal>())
|
||||
|
|
|
@ -16,6 +16,7 @@ LUAU_FASTFLAG(DebugLuauLogSolverToJson);
|
|||
LUAU_FASTFLAG(DebugLuauMagicTypes);
|
||||
LUAU_FASTFLAG(LuauNegatedClassTypes);
|
||||
LUAU_FASTFLAG(LuauScopelessModule);
|
||||
LUAU_FASTFLAG(SupportTypeAliasGoToDeclaration);
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
|
@ -418,7 +419,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocal* local)
|
|||
TypeId ty = nullptr;
|
||||
|
||||
if (local->annotation)
|
||||
ty = resolveType(scope, local->annotation, /* topLevel */ true);
|
||||
ty = resolveType(scope, local->annotation, /* inTypeArguments */ false);
|
||||
|
||||
varTypes.push_back(ty);
|
||||
}
|
||||
|
@ -521,8 +522,12 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocal* local)
|
|||
const Name name{local->vars.data[i]->name.value};
|
||||
|
||||
if (ModulePtr module = moduleResolver->getModule(moduleInfo->name))
|
||||
{
|
||||
scope->importedTypeBindings[name] =
|
||||
FFlag::LuauScopelessModule ? module->exportedTypeBindings : module->getModuleScope()->exportedTypeBindings;
|
||||
if (FFlag::SupportTypeAliasGoToDeclaration)
|
||||
scope->importedModules[name] = moduleName;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -775,7 +780,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatTypeAlias* alia
|
|||
}
|
||||
|
||||
ScopePtr resolvingScope = *defnIt;
|
||||
TypeId ty = resolveType(resolvingScope, alias->type, /* topLevel */ true);
|
||||
TypeId ty = resolveType(resolvingScope, alias->type, /* inTypeArguments */ false);
|
||||
|
||||
if (alias->exported)
|
||||
{
|
||||
|
@ -798,7 +803,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareGlobal*
|
|||
{
|
||||
LUAU_ASSERT(global->type);
|
||||
|
||||
TypeId globalTy = resolveType(scope, global->type);
|
||||
TypeId globalTy = resolveType(scope, global->type, /* inTypeArguments */ false);
|
||||
Name globalName(global->name.value);
|
||||
|
||||
module->declaredGlobals[globalName] = globalTy;
|
||||
|
@ -854,7 +859,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareClass* d
|
|||
for (const AstDeclaredClassProp& prop : declaredClass->props)
|
||||
{
|
||||
Name propName(prop.name.value);
|
||||
TypeId propTy = resolveType(scope, prop.ty);
|
||||
TypeId propTy = resolveType(scope, prop.ty, /* inTypeArguments */ false);
|
||||
|
||||
bool assignToMetatable = isMetamethod(propName);
|
||||
|
||||
|
@ -937,8 +942,8 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareFunction
|
|||
if (!generics.empty() || !genericPacks.empty())
|
||||
funScope = childScope(global, scope);
|
||||
|
||||
TypePackId paramPack = resolveTypePack(funScope, global->params);
|
||||
TypePackId retPack = resolveTypePack(funScope, global->retTypes);
|
||||
TypePackId paramPack = resolveTypePack(funScope, global->params, /* inTypeArguments */ false);
|
||||
TypePackId retPack = resolveTypePack(funScope, global->retTypes, /* inTypeArguments */ false);
|
||||
TypeId fnType = arena->addType(FunctionType{TypeLevel{}, funScope.get(), std::move(genericTys), std::move(genericTps), paramPack, retPack});
|
||||
FunctionType* ftv = getMutable<FunctionType>(fnType);
|
||||
|
||||
|
@ -1501,7 +1506,7 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprIfElse* if
|
|||
Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprTypeAssertion* typeAssert)
|
||||
{
|
||||
check(scope, typeAssert->expr, std::nullopt);
|
||||
return Inference{resolveType(scope, typeAssert->annotation)};
|
||||
return Inference{resolveType(scope, typeAssert->annotation, /* inTypeArguments */ false)};
|
||||
}
|
||||
|
||||
std::tuple<TypeId, TypeId, ConnectiveId> ConstraintGraphBuilder::checkBinary(
|
||||
|
@ -1563,7 +1568,7 @@ std::tuple<TypeId, TypeId, ConnectiveId> ConstraintGraphBuilder::checkBinary(
|
|||
TypeId ty = follow(typeFun->type);
|
||||
|
||||
// We're only interested in the root class of any classes.
|
||||
if (auto ctv = get<ClassType>(ty); !ctv || !ctv->parent)
|
||||
if (auto ctv = get<ClassType>(ty); !ctv || (FFlag::LuauNegatedClassTypes ? (ctv->parent == builtinTypes->classType) : !ctv->parent))
|
||||
discriminantTy = ty;
|
||||
}
|
||||
|
||||
|
@ -1618,39 +1623,6 @@ TypePackId ConstraintGraphBuilder::checkLValues(const ScopePtr& scope, AstArray<
|
|||
return arena->addTypePack(std::move(types));
|
||||
}
|
||||
|
||||
/**
|
||||
* If the expr is a dotted set of names, and if the root symbol refers to an
|
||||
* unsealed table, return that table type, plus the indeces that follow as a
|
||||
* vector.
|
||||
*/
|
||||
static std::optional<std::pair<Symbol, std::vector<const char*>>> extractDottedName(AstExpr* expr)
|
||||
{
|
||||
std::vector<const char*> names;
|
||||
|
||||
while (expr)
|
||||
{
|
||||
if (auto global = expr->as<AstExprGlobal>())
|
||||
{
|
||||
std::reverse(begin(names), end(names));
|
||||
return std::pair{global->name, std::move(names)};
|
||||
}
|
||||
else if (auto local = expr->as<AstExprLocal>())
|
||||
{
|
||||
std::reverse(begin(names), end(names));
|
||||
return std::pair{local->local, std::move(names)};
|
||||
}
|
||||
else if (auto indexName = expr->as<AstExprIndexName>())
|
||||
{
|
||||
names.push_back(indexName->index.value);
|
||||
expr = indexName->expr;
|
||||
}
|
||||
else
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is mostly about identifying properties that are being inserted into unsealed tables.
|
||||
*
|
||||
|
@ -1671,13 +1643,38 @@ TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExpr* expr)
|
|||
else if (!expr->is<AstExprIndexName>())
|
||||
return check(scope, expr).ty;
|
||||
|
||||
auto dottedPath = extractDottedName(expr);
|
||||
if (!dottedPath)
|
||||
Symbol sym;
|
||||
std::vector<std::string> segments;
|
||||
std::vector<AstExpr*> exprs;
|
||||
|
||||
AstExpr* e = expr;
|
||||
while (e)
|
||||
{
|
||||
if (auto global = e->as<AstExprGlobal>())
|
||||
{
|
||||
sym = global->name;
|
||||
break;
|
||||
}
|
||||
else if (auto local = e->as<AstExprLocal>())
|
||||
{
|
||||
sym = local->local;
|
||||
break;
|
||||
}
|
||||
else if (auto indexName = e->as<AstExprIndexName>())
|
||||
{
|
||||
segments.push_back(indexName->index.value);
|
||||
exprs.push_back(e);
|
||||
e = indexName->expr;
|
||||
}
|
||||
else
|
||||
return check(scope, expr).ty;
|
||||
const auto [sym, segments] = std::move(*dottedPath);
|
||||
}
|
||||
|
||||
LUAU_ASSERT(!segments.empty());
|
||||
|
||||
std::reverse(begin(segments), end(segments));
|
||||
std::reverse(begin(exprs), end(exprs));
|
||||
|
||||
auto lookupResult = scope->lookupEx(sym);
|
||||
if (!lookupResult)
|
||||
return check(scope, expr).ty;
|
||||
|
@ -1695,7 +1692,18 @@ TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExpr* expr)
|
|||
symbolScope->bindings[sym].typeId = updatedType;
|
||||
symbolScope->dcrRefinements[*def] = updatedType;
|
||||
|
||||
astTypes[expr] = propTy;
|
||||
TypeId prevSegmentTy = updatedType;
|
||||
for (size_t i = 0; i < segments.size(); ++i)
|
||||
{
|
||||
TypeId segmentTy = arena->addType(BlockedType{});
|
||||
astTypes[exprs[i]] = segmentTy;
|
||||
addConstraint(scope, expr->location, HasPropConstraint{segmentTy, prevSegmentTy, segments[i]});
|
||||
prevSegmentTy = segmentTy;
|
||||
}
|
||||
|
||||
astTypes[expr] = prevSegmentTy;
|
||||
astTypes[e] = updatedType;
|
||||
// astTypes[expr] = propTy;
|
||||
|
||||
return propTy;
|
||||
}
|
||||
|
@ -1845,7 +1853,7 @@ ConstraintGraphBuilder::FunctionSignature ConstraintGraphBuilder::checkFunctionS
|
|||
|
||||
if (local->annotation)
|
||||
{
|
||||
annotationTy = resolveType(signatureScope, local->annotation, /* topLevel */ true);
|
||||
annotationTy = resolveType(signatureScope, local->annotation, /* inTypeArguments */ false);
|
||||
addConstraint(signatureScope, local->annotation->location, SubtypeConstraint{t, annotationTy});
|
||||
}
|
||||
else if (i < expectedArgPack.head.size())
|
||||
|
@ -1866,7 +1874,7 @@ ConstraintGraphBuilder::FunctionSignature ConstraintGraphBuilder::checkFunctionS
|
|||
{
|
||||
if (fn->varargAnnotation)
|
||||
{
|
||||
TypePackId annotationType = resolveTypePack(signatureScope, fn->varargAnnotation);
|
||||
TypePackId annotationType = resolveTypePack(signatureScope, fn->varargAnnotation, /* inTypeArguments */ false);
|
||||
varargPack = annotationType;
|
||||
}
|
||||
else if (expectedArgPack.tail && get<VariadicTypePack>(*expectedArgPack.tail))
|
||||
|
@ -1893,7 +1901,7 @@ ConstraintGraphBuilder::FunctionSignature ConstraintGraphBuilder::checkFunctionS
|
|||
// Type checking will sort out any discrepancies later.
|
||||
if (fn->returnAnnotation)
|
||||
{
|
||||
TypePackId annotatedRetType = resolveTypePack(signatureScope, *fn->returnAnnotation);
|
||||
TypePackId annotatedRetType = resolveTypePack(signatureScope, *fn->returnAnnotation, /* inTypeArguments */ false);
|
||||
|
||||
// We bind the annotated type directly here so that, when we need to
|
||||
// generate constraints for return types, we have a guarantee that we
|
||||
|
@ -1942,7 +1950,7 @@ void ConstraintGraphBuilder::checkFunctionBody(const ScopePtr& scope, AstExprFun
|
|||
}
|
||||
}
|
||||
|
||||
TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, bool topLevel)
|
||||
TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, bool inTypeArguments)
|
||||
{
|
||||
TypeId result = nullptr;
|
||||
|
||||
|
@ -1960,7 +1968,7 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
|
|||
return builtinTypes->errorRecoveryType();
|
||||
}
|
||||
else
|
||||
return resolveType(scope, ref->parameters.data[0].type, topLevel);
|
||||
return resolveType(scope, ref->parameters.data[0].type, inTypeArguments);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1994,11 +2002,11 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
|
|||
// that is done in the parser.
|
||||
if (p.type)
|
||||
{
|
||||
parameters.push_back(resolveType(scope, p.type));
|
||||
parameters.push_back(resolveType(scope, p.type, /* inTypeArguments */ true));
|
||||
}
|
||||
else if (p.typePack)
|
||||
{
|
||||
packParameters.push_back(resolveTypePack(scope, p.typePack));
|
||||
packParameters.push_back(resolveTypePack(scope, p.typePack, /* inTypeArguments */ true));
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -2010,12 +2018,13 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
|
|||
|
||||
result = arena->addType(PendingExpansionType{ref->prefix, ref->name, parameters, packParameters});
|
||||
|
||||
if (topLevel)
|
||||
{
|
||||
// If we're not in a type argument context, we need to create a constraint that expands this.
|
||||
// The dispatching of the above constraint will queue up additional constraints for nested
|
||||
// type function applications.
|
||||
if (!inTypeArguments)
|
||||
addConstraint(scope, ty->location, TypeAliasExpansionConstraint{/* target */ result});
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
std::string typeName;
|
||||
|
@ -2035,7 +2044,7 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
|
|||
{
|
||||
std::string name = prop.name.value;
|
||||
// TODO: Recursion limit.
|
||||
TypeId propTy = resolveType(scope, prop.type);
|
||||
TypeId propTy = resolveType(scope, prop.type, inTypeArguments);
|
||||
// TODO: Fill in location.
|
||||
props[name] = {propTy};
|
||||
}
|
||||
|
@ -2044,8 +2053,8 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
|
|||
{
|
||||
// TODO: Recursion limit.
|
||||
indexer = TableIndexer{
|
||||
resolveType(scope, tab->indexer->indexType),
|
||||
resolveType(scope, tab->indexer->resultType),
|
||||
resolveType(scope, tab->indexer->indexType, inTypeArguments),
|
||||
resolveType(scope, tab->indexer->resultType, inTypeArguments),
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -2089,8 +2098,8 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
|
|||
signatureScope = scope;
|
||||
}
|
||||
|
||||
TypePackId argTypes = resolveTypePack(signatureScope, fn->argTypes);
|
||||
TypePackId returnTypes = resolveTypePack(signatureScope, fn->returnTypes);
|
||||
TypePackId argTypes = resolveTypePack(signatureScope, fn->argTypes, inTypeArguments);
|
||||
TypePackId returnTypes = resolveTypePack(signatureScope, fn->returnTypes, inTypeArguments);
|
||||
|
||||
// TODO: FunctionType needs a pointer to the scope so that we know
|
||||
// how to quantify/instantiate it.
|
||||
|
@ -2130,7 +2139,7 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
|
|||
for (AstType* part : unionAnnotation->types)
|
||||
{
|
||||
// TODO: Recursion limit.
|
||||
parts.push_back(resolveType(scope, part, topLevel));
|
||||
parts.push_back(resolveType(scope, part, inTypeArguments));
|
||||
}
|
||||
|
||||
result = arena->addType(UnionType{parts});
|
||||
|
@ -2141,7 +2150,7 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
|
|||
for (AstType* part : intersectionAnnotation->types)
|
||||
{
|
||||
// TODO: Recursion limit.
|
||||
parts.push_back(resolveType(scope, part, topLevel));
|
||||
parts.push_back(resolveType(scope, part, inTypeArguments));
|
||||
}
|
||||
|
||||
result = arena->addType(IntersectionType{parts});
|
||||
|
@ -2168,16 +2177,16 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
|
|||
return result;
|
||||
}
|
||||
|
||||
TypePackId ConstraintGraphBuilder::resolveTypePack(const ScopePtr& scope, AstTypePack* tp)
|
||||
TypePackId ConstraintGraphBuilder::resolveTypePack(const ScopePtr& scope, AstTypePack* tp, bool inTypeArgument)
|
||||
{
|
||||
TypePackId result;
|
||||
if (auto expl = tp->as<AstTypePackExplicit>())
|
||||
{
|
||||
result = resolveTypePack(scope, expl->typeList);
|
||||
result = resolveTypePack(scope, expl->typeList, inTypeArgument);
|
||||
}
|
||||
else if (auto var = tp->as<AstTypePackVariadic>())
|
||||
{
|
||||
TypeId ty = resolveType(scope, var->variadicType);
|
||||
TypeId ty = resolveType(scope, var->variadicType, inTypeArgument);
|
||||
result = arena->addTypePack(TypePackVar{VariadicTypePack{ty}});
|
||||
}
|
||||
else if (auto gen = tp->as<AstTypePackGeneric>())
|
||||
|
@ -2202,19 +2211,19 @@ TypePackId ConstraintGraphBuilder::resolveTypePack(const ScopePtr& scope, AstTyp
|
|||
return result;
|
||||
}
|
||||
|
||||
TypePackId ConstraintGraphBuilder::resolveTypePack(const ScopePtr& scope, const AstTypeList& list)
|
||||
TypePackId ConstraintGraphBuilder::resolveTypePack(const ScopePtr& scope, const AstTypeList& list, bool inTypeArguments)
|
||||
{
|
||||
std::vector<TypeId> head;
|
||||
|
||||
for (AstType* headTy : list.types)
|
||||
{
|
||||
head.push_back(resolveType(scope, headTy));
|
||||
head.push_back(resolveType(scope, headTy, inTypeArguments));
|
||||
}
|
||||
|
||||
std::optional<TypePackId> tail = std::nullopt;
|
||||
if (list.tailType)
|
||||
{
|
||||
tail = resolveTypePack(scope, list.tailType);
|
||||
tail = resolveTypePack(scope, list.tailType, inTypeArguments);
|
||||
}
|
||||
|
||||
return arena->addTypePack(TypePack{head, tail});
|
||||
|
@ -2229,7 +2238,7 @@ std::vector<std::pair<Name, GenericTypeDefinition>> ConstraintGraphBuilder::crea
|
|||
std::optional<TypeId> defaultTy = std::nullopt;
|
||||
|
||||
if (generic.defaultValue)
|
||||
defaultTy = resolveType(scope, generic.defaultValue);
|
||||
defaultTy = resolveType(scope, generic.defaultValue, /* inTypeArguments */ false);
|
||||
|
||||
result.push_back({generic.name.value, GenericTypeDefinition{genericTy, defaultTy}});
|
||||
}
|
||||
|
@ -2247,7 +2256,7 @@ std::vector<std::pair<Name, GenericTypePackDefinition>> ConstraintGraphBuilder::
|
|||
std::optional<TypePackId> defaultTy = std::nullopt;
|
||||
|
||||
if (generic.defaultValue)
|
||||
defaultTy = resolveTypePack(scope, generic.defaultValue);
|
||||
defaultTy = resolveTypePack(scope, generic.defaultValue, /* inTypeArguments */ false);
|
||||
|
||||
result.push_back({generic.name.value, GenericTypePackDefinition{genericTy, defaultTy}});
|
||||
}
|
||||
|
|
|
@ -417,7 +417,7 @@ bool ConstraintSolver::tryDispatch(NotNull<const Constraint> constraint, bool fo
|
|||
else if (auto hpc = get<HasPropConstraint>(*constraint))
|
||||
success = tryDispatch(*hpc, constraint);
|
||||
else if (auto spc = get<SetPropConstraint>(*constraint))
|
||||
success = tryDispatch(*spc, constraint);
|
||||
success = tryDispatch(*spc, constraint, force);
|
||||
else if (auto sottc = get<SingletonOrTopTypeConstraint>(*constraint))
|
||||
success = tryDispatch(*sottc, constraint);
|
||||
else
|
||||
|
@ -933,13 +933,11 @@ struct InfiniteTypeFinder : TypeOnceVisitor
|
|||
struct InstantiationQueuer : TypeOnceVisitor
|
||||
{
|
||||
ConstraintSolver* solver;
|
||||
const InstantiationSignature& signature;
|
||||
NotNull<Scope> scope;
|
||||
Location location;
|
||||
|
||||
explicit InstantiationQueuer(NotNull<Scope> scope, const Location& location, ConstraintSolver* solver, const InstantiationSignature& signature)
|
||||
explicit InstantiationQueuer(NotNull<Scope> scope, const Location& location, ConstraintSolver* solver)
|
||||
: solver(solver)
|
||||
, signature(signature)
|
||||
, scope(scope)
|
||||
, location(location)
|
||||
{
|
||||
|
@ -1061,8 +1059,17 @@ bool ConstraintSolver::tryDispatch(const TypeAliasExpansionConstraint& c, NotNul
|
|||
TypeId instantiated = *maybeInstantiated;
|
||||
TypeId target = follow(instantiated);
|
||||
|
||||
// The application is not recursive, so we need to queue up application of
|
||||
// any child type function instantiations within the result in order for it
|
||||
// to be complete.
|
||||
InstantiationQueuer queuer{constraint->scope, constraint->location, this};
|
||||
queuer.traverse(target);
|
||||
|
||||
if (target->persistent)
|
||||
{
|
||||
bindResult(target);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Type function application will happily give us the exact same type if
|
||||
// there are e.g. generic saturatedTypeArguments that go unused.
|
||||
|
@ -1102,12 +1109,6 @@ bool ConstraintSolver::tryDispatch(const TypeAliasExpansionConstraint& c, NotNul
|
|||
|
||||
bindResult(target);
|
||||
|
||||
// The application is not recursive, so we need to queue up application of
|
||||
// any child type function instantiations within the result in order for it
|
||||
// to be complete.
|
||||
InstantiationQueuer queuer{constraint->scope, constraint->location, this, signature};
|
||||
queuer.traverse(target);
|
||||
|
||||
instantiatedAliases[signature] = target;
|
||||
|
||||
return true;
|
||||
|
@ -1326,13 +1327,16 @@ static std::optional<TypeId> updateTheTableType(NotNull<TypeArena> arena, TypeId
|
|||
return res;
|
||||
}
|
||||
|
||||
bool ConstraintSolver::tryDispatch(const SetPropConstraint& c, NotNull<const Constraint> constraint)
|
||||
bool ConstraintSolver::tryDispatch(const SetPropConstraint& c, NotNull<const Constraint> constraint, bool force)
|
||||
{
|
||||
TypeId subjectType = follow(c.subjectType);
|
||||
|
||||
if (isBlocked(subjectType))
|
||||
return block(subjectType, constraint);
|
||||
|
||||
if (!force && get<FreeType>(subjectType))
|
||||
return block(subjectType, constraint);
|
||||
|
||||
std::optional<TypeId> existingPropType = subjectType;
|
||||
for (const std::string& segment : c.path)
|
||||
{
|
||||
|
@ -1399,6 +1403,13 @@ bool ConstraintSolver::tryDispatch(const SetPropConstraint& c, NotNull<const Con
|
|||
return true;
|
||||
}
|
||||
}
|
||||
else if (get<ClassType>(subjectType))
|
||||
{
|
||||
// Classes never change shape as a result of property assignments.
|
||||
// The result is always the subject.
|
||||
bind(c.resultType, subjectType);
|
||||
return true;
|
||||
}
|
||||
else if (get<AnyType>(subjectType) || get<ErrorType>(subjectType))
|
||||
{
|
||||
bind(c.resultType, subjectType);
|
||||
|
|
|
@ -13,7 +13,6 @@
|
|||
#include <limits.h>
|
||||
|
||||
LUAU_FASTINTVARIABLE(LuauSuggestionDistance, 4)
|
||||
LUAU_FASTFLAGVARIABLE(LuauLintGlobalNeverReadBeforeWritten, false)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
|
@ -331,8 +330,7 @@ private:
|
|||
"Global '%s' is only used in the enclosing function defined at line %d; consider changing it to local",
|
||||
g.firstRef->name.value, top->location.begin.line + 1);
|
||||
}
|
||||
else if (FFlag::LuauLintGlobalNeverReadBeforeWritten && g.assigned && !g.readBeforeWritten && !g.definedInModuleScope &&
|
||||
g.firstRef->name != context->placeholder)
|
||||
else if (g.assigned && !g.readBeforeWritten && !g.definedInModuleScope && g.firstRef->name != context->placeholder)
|
||||
{
|
||||
emitWarning(*context, LintWarning::Code_GlobalUsedAsLocal, g.firstRef->location,
|
||||
"Global '%s' is never read before being written. Consider changing it to local", g.firstRef->name.value);
|
||||
|
@ -353,7 +351,7 @@ private:
|
|||
|
||||
bool visit(AstExprGlobal* node) override
|
||||
{
|
||||
if (FFlag::LuauLintGlobalNeverReadBeforeWritten && !functionStack.empty() && !functionStack.back().dominatedGlobals.contains(node->name))
|
||||
if (!functionStack.empty() && !functionStack.back().dominatedGlobals.contains(node->name))
|
||||
{
|
||||
Global& g = globals[node->name];
|
||||
g.readBeforeWritten = true;
|
||||
|
@ -386,8 +384,6 @@ private:
|
|||
{
|
||||
Global& g = globals[gv->name];
|
||||
|
||||
if (FFlag::LuauLintGlobalNeverReadBeforeWritten)
|
||||
{
|
||||
if (functionStack.empty())
|
||||
{
|
||||
g.definedInModuleScope = true;
|
||||
|
@ -399,7 +395,6 @@ private:
|
|||
functionStack.back().dominatedGlobals.insert(gv->name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (g.builtin)
|
||||
emitWarning(*context, LintWarning::Code_BuiltinGlobalWrite, gv->location,
|
||||
|
@ -437,12 +432,9 @@ private:
|
|||
else
|
||||
{
|
||||
g.assigned = true;
|
||||
if (FFlag::LuauLintGlobalNeverReadBeforeWritten)
|
||||
{
|
||||
g.definedAsFunction = true;
|
||||
g.definedInModuleScope = functionStack.empty();
|
||||
}
|
||||
}
|
||||
|
||||
trackGlobalRef(gv);
|
||||
}
|
||||
|
@ -475,9 +467,6 @@ private:
|
|||
|
||||
bool visit(AstStatIf* node) override
|
||||
{
|
||||
if (!FFlag::LuauLintGlobalNeverReadBeforeWritten)
|
||||
return true;
|
||||
|
||||
HoldConditionalExecution ce(*this);
|
||||
node->condition->visit(this);
|
||||
node->thenbody->visit(this);
|
||||
|
@ -489,9 +478,6 @@ private:
|
|||
|
||||
bool visit(AstStatWhile* node) override
|
||||
{
|
||||
if (!FFlag::LuauLintGlobalNeverReadBeforeWritten)
|
||||
return true;
|
||||
|
||||
HoldConditionalExecution ce(*this);
|
||||
node->condition->visit(this);
|
||||
node->body->visit(this);
|
||||
|
@ -501,9 +487,6 @@ private:
|
|||
|
||||
bool visit(AstStatRepeat* node) override
|
||||
{
|
||||
if (!FFlag::LuauLintGlobalNeverReadBeforeWritten)
|
||||
return true;
|
||||
|
||||
HoldConditionalExecution ce(*this);
|
||||
node->condition->visit(this);
|
||||
node->body->visit(this);
|
||||
|
@ -513,9 +496,6 @@ private:
|
|||
|
||||
bool visit(AstStatFor* node) override
|
||||
{
|
||||
if (!FFlag::LuauLintGlobalNeverReadBeforeWritten)
|
||||
return true;
|
||||
|
||||
HoldConditionalExecution ce(*this);
|
||||
node->from->visit(this);
|
||||
node->to->visit(this);
|
||||
|
@ -530,9 +510,6 @@ private:
|
|||
|
||||
bool visit(AstStatForIn* node) override
|
||||
{
|
||||
if (!FFlag::LuauLintGlobalNeverReadBeforeWritten)
|
||||
return true;
|
||||
|
||||
HoldConditionalExecution ce(*this);
|
||||
for (AstExpr* expr : node->values)
|
||||
expr->visit(this);
|
||||
|
|
|
@ -17,13 +17,10 @@ LUAU_FASTFLAGVARIABLE(DebugLuauCheckNormalizeInvariant, false)
|
|||
// This could theoretically be 2000 on amd64, but x86 requires this.
|
||||
LUAU_FASTINTVARIABLE(LuauNormalizeIterationLimit, 1200);
|
||||
LUAU_FASTINTVARIABLE(LuauNormalizeCacheLimit, 100000);
|
||||
LUAU_FASTFLAGVARIABLE(LuauNormalizeCombineTableFix, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauTypeNormalization2, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauNegatedClassTypes, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauNegatedFunctionTypes, false);
|
||||
LUAU_FASTFLAG(LuauUnknownAndNeverType)
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
|
||||
LUAU_FASTFLAG(LuauOverloadedFunctionSubtypingPerf);
|
||||
LUAU_FASTFLAG(LuauUninhabitedSubAnything2)
|
||||
|
||||
namespace Luau
|
||||
|
@ -2165,7 +2162,7 @@ std::optional<TypeId> Normalizer::intersectionOfFunctions(TypeId here, TypeId th
|
|||
argTypes = *argTypesOpt;
|
||||
retTypes = hftv->retTypes;
|
||||
}
|
||||
else if (FFlag::LuauOverloadedFunctionSubtypingPerf && hftv->argTypes == tftv->argTypes)
|
||||
else if (hftv->argTypes == tftv->argTypes)
|
||||
{
|
||||
std::optional<TypePackId> retTypesOpt = intersectionOfTypePacks(hftv->argTypes, tftv->argTypes);
|
||||
if (!retTypesOpt)
|
||||
|
|
|
@ -157,6 +157,8 @@ struct PureQuantifier : Substitution
|
|||
Scope* scope;
|
||||
std::vector<TypeId> insertedGenerics;
|
||||
std::vector<TypePackId> insertedGenericPacks;
|
||||
bool seenMutableType = false;
|
||||
bool seenGenericType = false;
|
||||
|
||||
PureQuantifier(TypeArena* arena, Scope* scope)
|
||||
: Substitution(TxnLog::empty(), arena)
|
||||
|
@ -170,11 +172,18 @@ struct PureQuantifier : Substitution
|
|||
|
||||
if (auto ftv = get<FreeType>(ty))
|
||||
{
|
||||
return subsumes(scope, ftv->scope);
|
||||
bool result = subsumes(scope, ftv->scope);
|
||||
seenMutableType |= result;
|
||||
return result;
|
||||
}
|
||||
else if (auto ttv = get<TableType>(ty))
|
||||
{
|
||||
return ttv->state == TableState::Free && subsumes(scope, ttv->scope);
|
||||
if (ttv->state == TableState::Free)
|
||||
seenMutableType = true;
|
||||
else if (ttv->state == TableState::Generic)
|
||||
seenGenericType = true;
|
||||
|
||||
return ttv->state == TableState::Unsealed || (ttv->state == TableState::Free && subsumes(scope, ttv->scope));
|
||||
}
|
||||
|
||||
return false;
|
||||
|
@ -207,7 +216,11 @@ struct PureQuantifier : Substitution
|
|||
*resultTable = *ttv;
|
||||
resultTable->level = TypeLevel{};
|
||||
resultTable->scope = scope;
|
||||
|
||||
if (ttv->state == TableState::Free)
|
||||
resultTable->state = TableState::Generic;
|
||||
else if (ttv->state == TableState::Unsealed)
|
||||
resultTable->state = TableState::Sealed;
|
||||
|
||||
return result;
|
||||
}
|
||||
|
@ -251,7 +264,7 @@ TypeId quantify(TypeArena* arena, TypeId ty, Scope* scope)
|
|||
ftv->scope = scope;
|
||||
ftv->generics.insert(ftv->generics.end(), quantifier.insertedGenerics.begin(), quantifier.insertedGenerics.end());
|
||||
ftv->genericPacks.insert(ftv->genericPacks.end(), quantifier.insertedGenericPacks.begin(), quantifier.insertedGenericPacks.end());
|
||||
ftv->hasNoGenerics = ftv->generics.empty() && ftv->genericPacks.empty();
|
||||
ftv->hasNoGenerics = ftv->generics.empty() && ftv->genericPacks.empty() && !quantifier.seenGenericType && !quantifier.seenMutableType;
|
||||
|
||||
return *result;
|
||||
}
|
||||
|
|
|
@ -16,7 +16,6 @@
|
|||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
|
||||
LUAU_FASTFLAG(LuauUnknownAndNeverType)
|
||||
LUAU_FASTFLAGVARIABLE(LuauFunctionReturnStringificationFixup, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauUnseeArrayTtv, false)
|
||||
|
||||
/*
|
||||
* Prefix generic typenames with gen-
|
||||
|
@ -311,8 +310,7 @@ struct TypeStringifier
|
|||
}
|
||||
|
||||
Luau::visit(
|
||||
[this, tv](auto&& t)
|
||||
{
|
||||
[this, tv](auto&& t) {
|
||||
return (*this)(tv, t);
|
||||
},
|
||||
tv->ty);
|
||||
|
@ -607,9 +605,7 @@ struct TypeStringifier
|
|||
stringify(ttv.indexer->indexResultType);
|
||||
state.emit("}");
|
||||
|
||||
if (FFlag::LuauUnseeArrayTtv)
|
||||
state.unsee(&ttv);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -910,8 +906,7 @@ struct TypePackStringifier
|
|||
}
|
||||
|
||||
Luau::visit(
|
||||
[this, tp](auto&& t)
|
||||
{
|
||||
[this, tp](auto&& t) {
|
||||
return (*this)(tp, t);
|
||||
},
|
||||
tp->ty);
|
||||
|
@ -1061,9 +1056,7 @@ static void assignCycleNames(const std::set<TypeId>& cycles, const std::set<Type
|
|||
if (auto ttv = get<TableType>(follow(cycleTy)); !exhaustive && ttv && (ttv->syntheticName || ttv->name))
|
||||
{
|
||||
// If we have a cycle type in type parameters, assign a cycle name for this named table
|
||||
if (std::find_if(ttv->instantiatedTypeParams.begin(), ttv->instantiatedTypeParams.end(),
|
||||
[&](auto&& el)
|
||||
{
|
||||
if (std::find_if(ttv->instantiatedTypeParams.begin(), ttv->instantiatedTypeParams.end(), [&](auto&& el) {
|
||||
return cycles.count(follow(el));
|
||||
}) != ttv->instantiatedTypeParams.end())
|
||||
cycleNames[cycleTy] = ttv->name ? *ttv->name : *ttv->syntheticName;
|
||||
|
@ -1160,9 +1153,7 @@ ToStringResult toStringDetailed(TypeId ty, ToStringOptions& opts)
|
|||
state.exhaustive = true;
|
||||
|
||||
std::vector<std::pair<TypeId, std::string>> sortedCycleNames{state.cycleNames.begin(), state.cycleNames.end()};
|
||||
std::sort(sortedCycleNames.begin(), sortedCycleNames.end(),
|
||||
[](const auto& a, const auto& b)
|
||||
{
|
||||
std::sort(sortedCycleNames.begin(), sortedCycleNames.end(), [](const auto& a, const auto& b) {
|
||||
return a.second < b.second;
|
||||
});
|
||||
|
||||
|
@ -1175,8 +1166,7 @@ ToStringResult toStringDetailed(TypeId ty, ToStringOptions& opts)
|
|||
state.emit(name);
|
||||
state.emit(" = ");
|
||||
Luau::visit(
|
||||
[&tvs, cycleTy = cycleTy](auto&& t)
|
||||
{
|
||||
[&tvs, cycleTy = cycleTy](auto&& t) {
|
||||
return tvs(cycleTy, t);
|
||||
},
|
||||
cycleTy->ty);
|
||||
|
@ -1257,9 +1247,7 @@ ToStringResult toStringDetailed(TypePackId tp, ToStringOptions& opts)
|
|||
state.exhaustive = true;
|
||||
|
||||
std::vector<std::pair<TypeId, std::string>> sortedCycleNames{state.cycleNames.begin(), state.cycleNames.end()};
|
||||
std::sort(sortedCycleNames.begin(), sortedCycleNames.end(),
|
||||
[](const auto& a, const auto& b)
|
||||
{
|
||||
std::sort(sortedCycleNames.begin(), sortedCycleNames.end(), [](const auto& a, const auto& b) {
|
||||
return a.second < b.second;
|
||||
});
|
||||
|
||||
|
@ -1272,8 +1260,7 @@ ToStringResult toStringDetailed(TypePackId tp, ToStringOptions& opts)
|
|||
state.emit(name);
|
||||
state.emit(" = ");
|
||||
Luau::visit(
|
||||
[&tvs, cycleTy = cycleTy](auto t)
|
||||
{
|
||||
[&tvs, cycleTy = cycleTy](auto t) {
|
||||
return tvs(cycleTy, t);
|
||||
},
|
||||
cycleTy->ty);
|
||||
|
@ -1413,6 +1400,15 @@ std::string dump(TypeId ty)
|
|||
return s;
|
||||
}
|
||||
|
||||
std::string dump(const std::optional<TypeId>& ty)
|
||||
{
|
||||
if (ty)
|
||||
return dump(*ty);
|
||||
|
||||
printf("nullopt\n");
|
||||
return "nullopt";
|
||||
}
|
||||
|
||||
std::string dump(TypePackId ty)
|
||||
{
|
||||
std::string s = toString(ty, dumpOptions());
|
||||
|
@ -1420,6 +1416,15 @@ std::string dump(TypePackId ty)
|
|||
return s;
|
||||
}
|
||||
|
||||
std::string dump(const std::optional<TypePackId>& ty)
|
||||
{
|
||||
if (ty)
|
||||
return dump(*ty);
|
||||
|
||||
printf("nullopt\n");
|
||||
return "nullopt";
|
||||
}
|
||||
|
||||
std::string dump(const ScopePtr& scope, const char* name)
|
||||
{
|
||||
auto binding = scope->linearSearchForBinding(name);
|
||||
|
|
|
@ -27,6 +27,7 @@ LUAU_FASTINT(LuauTypeInferRecursionLimit)
|
|||
LUAU_FASTFLAG(LuauUnknownAndNeverType)
|
||||
LUAU_FASTFLAGVARIABLE(LuauMaybeGenericIntersectionTypes, false)
|
||||
LUAU_FASTFLAG(LuauInstantiateInSubtyping)
|
||||
LUAU_FASTFLAGVARIABLE(LuauMatchReturnsOptionalString, false);
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
|
@ -768,15 +769,16 @@ BuiltinTypes::BuiltinTypes()
|
|||
, errorType(arena->addType(Type{ErrorType{}, /*persistent*/ true}))
|
||||
, falsyType(arena->addType(Type{UnionType{{falseType, nilType}}, /*persistent*/ true}))
|
||||
, truthyType(arena->addType(Type{NegationType{falsyType}, /*persistent*/ true}))
|
||||
, optionalNumberType(arena->addType(Type{UnionType{{numberType, nilType}}, /*persistent*/ true}))
|
||||
, optionalStringType(arena->addType(Type{UnionType{{stringType, nilType}}, /*persistent*/ true}))
|
||||
, anyTypePack(arena->addTypePack(TypePackVar{VariadicTypePack{anyType}, /*persistent*/ true}))
|
||||
, neverTypePack(arena->addTypePack(TypePackVar{VariadicTypePack{neverType}, /*persistent*/ true}))
|
||||
, uninhabitableTypePack(arena->addTypePack({neverType}, neverTypePack))
|
||||
, uninhabitableTypePack(arena->addTypePack(TypePackVar{TypePack{{neverType}, neverTypePack}, /*persistent*/ true}))
|
||||
, errorTypePack(arena->addTypePack(TypePackVar{Unifiable::Error{}, /*persistent*/ true}))
|
||||
{
|
||||
TypeId stringMetatable = makeStringMetatable();
|
||||
asMutable(stringType)->ty = PrimitiveType{PrimitiveType::String, stringMetatable};
|
||||
persist(stringMetatable);
|
||||
persist(uninhabitableTypePack);
|
||||
|
||||
freeze(*arena);
|
||||
}
|
||||
|
@ -1231,12 +1233,12 @@ static std::vector<TypeId> parsePatternString(NotNull<BuiltinTypes> builtinTypes
|
|||
if (i + 1 < size && data[i + 1] == ')')
|
||||
{
|
||||
i++;
|
||||
result.push_back(builtinTypes->numberType);
|
||||
result.push_back(FFlag::LuauMatchReturnsOptionalString ? builtinTypes->optionalNumberType : builtinTypes->numberType);
|
||||
continue;
|
||||
}
|
||||
|
||||
++depth;
|
||||
result.push_back(builtinTypes->stringType);
|
||||
result.push_back(FFlag::LuauMatchReturnsOptionalString ? builtinTypes->optionalStringType : builtinTypes->stringType);
|
||||
}
|
||||
else if (data[i] == ')')
|
||||
{
|
||||
|
@ -1254,7 +1256,7 @@ static std::vector<TypeId> parsePatternString(NotNull<BuiltinTypes> builtinTypes
|
|||
return std::vector<TypeId>();
|
||||
|
||||
if (result.empty())
|
||||
result.push_back(builtinTypes->stringType);
|
||||
result.push_back(FFlag::LuauMatchReturnsOptionalString ? builtinTypes->optionalStringType : builtinTypes->stringType);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
#include "Luau/Ast.h"
|
||||
#include "Luau/AstQuery.h"
|
||||
#include "Luau/Clone.h"
|
||||
#include "Luau/Error.h"
|
||||
#include "Luau/Instantiation.h"
|
||||
#include "Luau/Metamethods.h"
|
||||
#include "Luau/Normalize.h"
|
||||
|
@ -212,6 +213,12 @@ struct TypeChecker2
|
|||
return bestScope;
|
||||
}
|
||||
|
||||
enum ValueContext
|
||||
{
|
||||
LValue,
|
||||
RValue
|
||||
};
|
||||
|
||||
void visit(AstStat* stat)
|
||||
{
|
||||
auto pusher = pushStack(stat);
|
||||
|
@ -273,7 +280,7 @@ struct TypeChecker2
|
|||
|
||||
void visit(AstStatIf* ifStatement)
|
||||
{
|
||||
visit(ifStatement->condition);
|
||||
visit(ifStatement->condition, RValue);
|
||||
visit(ifStatement->thenbody);
|
||||
if (ifStatement->elsebody)
|
||||
visit(ifStatement->elsebody);
|
||||
|
@ -281,14 +288,14 @@ struct TypeChecker2
|
|||
|
||||
void visit(AstStatWhile* whileStatement)
|
||||
{
|
||||
visit(whileStatement->condition);
|
||||
visit(whileStatement->condition, RValue);
|
||||
visit(whileStatement->body);
|
||||
}
|
||||
|
||||
void visit(AstStatRepeat* repeatStatement)
|
||||
{
|
||||
visit(repeatStatement->body);
|
||||
visit(repeatStatement->condition);
|
||||
visit(repeatStatement->condition, RValue);
|
||||
}
|
||||
|
||||
void visit(AstStatBreak*) {}
|
||||
|
@ -315,12 +322,12 @@ struct TypeChecker2
|
|||
}
|
||||
|
||||
for (AstExpr* expr : ret->list)
|
||||
visit(expr);
|
||||
visit(expr, RValue);
|
||||
}
|
||||
|
||||
void visit(AstStatExpr* expr)
|
||||
{
|
||||
visit(expr->expr);
|
||||
visit(expr->expr, RValue);
|
||||
}
|
||||
|
||||
void visit(AstStatLocal* local)
|
||||
|
@ -331,7 +338,7 @@ struct TypeChecker2
|
|||
AstExpr* value = i < local->values.size ? local->values.data[i] : nullptr;
|
||||
|
||||
if (value)
|
||||
visit(value);
|
||||
visit(value, RValue);
|
||||
|
||||
TypeId* maybeValueType = value ? module->astTypes.find(value) : nullptr;
|
||||
if (i != local->values.size - 1 || maybeValueType)
|
||||
|
@ -387,10 +394,10 @@ struct TypeChecker2
|
|||
if (forStatement->var->annotation)
|
||||
visit(forStatement->var->annotation);
|
||||
|
||||
visit(forStatement->from);
|
||||
visit(forStatement->to);
|
||||
visit(forStatement->from, RValue);
|
||||
visit(forStatement->to, RValue);
|
||||
if (forStatement->step)
|
||||
visit(forStatement->step);
|
||||
visit(forStatement->step, RValue);
|
||||
visit(forStatement->body);
|
||||
}
|
||||
|
||||
|
@ -403,7 +410,7 @@ struct TypeChecker2
|
|||
}
|
||||
|
||||
for (AstExpr* expr : forInStatement->values)
|
||||
visit(expr);
|
||||
visit(expr, RValue);
|
||||
|
||||
visit(forInStatement->body);
|
||||
|
||||
|
@ -610,11 +617,11 @@ struct TypeChecker2
|
|||
for (size_t i = 0; i < count; ++i)
|
||||
{
|
||||
AstExpr* lhs = assign->vars.data[i];
|
||||
visit(lhs);
|
||||
visit(lhs, LValue);
|
||||
TypeId lhsType = lookupType(lhs);
|
||||
|
||||
AstExpr* rhs = assign->values.data[i];
|
||||
visit(rhs);
|
||||
visit(rhs, RValue);
|
||||
TypeId rhsType = lookupType(rhs);
|
||||
|
||||
if (!isSubtype(rhsType, lhsType, stack.back()))
|
||||
|
@ -635,7 +642,7 @@ struct TypeChecker2
|
|||
|
||||
void visit(AstStatFunction* stat)
|
||||
{
|
||||
visit(stat->name);
|
||||
visit(stat->name, LValue);
|
||||
visit(stat->func);
|
||||
}
|
||||
|
||||
|
@ -698,13 +705,13 @@ struct TypeChecker2
|
|||
void visit(AstStatError* stat)
|
||||
{
|
||||
for (AstExpr* expr : stat->expressions)
|
||||
visit(expr);
|
||||
visit(expr, RValue);
|
||||
|
||||
for (AstStat* s : stat->statements)
|
||||
visit(s);
|
||||
}
|
||||
|
||||
void visit(AstExpr* expr)
|
||||
void visit(AstExpr* expr, ValueContext context)
|
||||
{
|
||||
auto StackPusher = pushStack(expr);
|
||||
|
||||
|
@ -712,7 +719,7 @@ struct TypeChecker2
|
|||
{
|
||||
}
|
||||
else if (auto e = expr->as<AstExprGroup>())
|
||||
return visit(e);
|
||||
return visit(e, context);
|
||||
else if (auto e = expr->as<AstExprConstantNil>())
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprConstantBool>())
|
||||
|
@ -730,9 +737,9 @@ struct TypeChecker2
|
|||
else if (auto e = expr->as<AstExprCall>())
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprIndexName>())
|
||||
return visit(e);
|
||||
return visit(e, context);
|
||||
else if (auto e = expr->as<AstExprIndexExpr>())
|
||||
return visit(e);
|
||||
return visit(e, context);
|
||||
else if (auto e = expr->as<AstExprFunction>())
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprTable>())
|
||||
|
@ -754,9 +761,9 @@ struct TypeChecker2
|
|||
LUAU_ASSERT(!"TypeChecker2 encountered an unknown expression type");
|
||||
}
|
||||
|
||||
void visit(AstExprGroup* expr)
|
||||
void visit(AstExprGroup* expr, ValueContext context)
|
||||
{
|
||||
visit(expr->expr);
|
||||
visit(expr->expr, context);
|
||||
}
|
||||
|
||||
void visit(AstExprConstantNil* expr)
|
||||
|
@ -808,10 +815,10 @@ struct TypeChecker2
|
|||
|
||||
void visit(AstExprCall* call)
|
||||
{
|
||||
visit(call->func);
|
||||
visit(call->func, RValue);
|
||||
|
||||
for (AstExpr* arg : call->args)
|
||||
visit(arg);
|
||||
visit(arg, RValue);
|
||||
|
||||
TypeArena* arena = &testArena;
|
||||
Instantiation instantiation{TxnLog::empty(), arena, TypeLevel{}, stack.back()};
|
||||
|
@ -820,6 +827,8 @@ struct TypeChecker2
|
|||
TypeId functionType = lookupType(call->func);
|
||||
TypeId testFunctionType = functionType;
|
||||
TypePack args;
|
||||
std::vector<Location> argLocs;
|
||||
argLocs.reserve(call->args.size + 1);
|
||||
|
||||
if (get<AnyType>(functionType) || get<ErrorType>(functionType))
|
||||
return;
|
||||
|
@ -830,6 +839,7 @@ struct TypeChecker2
|
|||
if (std::optional<TypeId> instantiatedCallMm = instantiation.substitute(*callMm))
|
||||
{
|
||||
args.head.push_back(functionType);
|
||||
argLocs.push_back(call->func->location);
|
||||
testFunctionType = follow(*instantiatedCallMm);
|
||||
}
|
||||
else
|
||||
|
@ -899,11 +909,13 @@ struct TypeChecker2
|
|||
ice.ice("method call expression has no 'self'");
|
||||
|
||||
args.head.push_back(lookupType(indexExpr->expr));
|
||||
argLocs.push_back(indexExpr->expr->location);
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < call->args.size; ++i)
|
||||
{
|
||||
AstExpr* arg = call->args.data[i];
|
||||
argLocs.push_back(arg->location);
|
||||
TypeId* argTy = module->astTypes.find(arg);
|
||||
if (argTy)
|
||||
args.head.push_back(*argTy);
|
||||
|
@@ -919,19 +931,34 @@ struct TypeChecker2
args.head.push_back(builtinTypes->anyType);
}
TypePackId argsTp = arena->addTypePack(args);
FunctionType ftv{argsTp, expectedRetType};
TypeId expectedType = arena->addType(ftv);
TypePackId expectedArgTypes = arena->addTypePack(args);
if (!isSubtype(testFunctionType, expectedType, stack.back()))
const FunctionType* inferredFunctionType = get<FunctionType>(testFunctionType);
LUAU_ASSERT(inferredFunctionType); // testFunctionType should always be a FunctionType here
size_t argIndex = 0;
auto inferredArgIt = begin(inferredFunctionType->argTypes);
auto expectedArgIt = begin(expectedArgTypes);
while (inferredArgIt != end(inferredFunctionType->argTypes) && expectedArgIt != end(expectedArgTypes))
{
CloneState cloneState;
expectedType = clone(expectedType, testArena, cloneState);
reportError(TypeMismatch{expectedType, functionType}, call->location);
}
Location argLoc = (argIndex >= argLocs.size()) ? argLocs.back() : argLocs[argIndex];
reportErrors(tryUnify(stack.back(), argLoc, *expectedArgIt, *inferredArgIt));
++argIndex;
++inferredArgIt;
++expectedArgIt;
}
void visit(AstExprIndexName* indexName)
// piggyback on the unifier for arity checking, but we can't do this for checking the actual arguments since the locations would be bad
ErrorVec errors = tryUnify(stack.back(), call->location, expectedArgTypes, inferredFunctionType->argTypes);
for (TypeError e : errors)
if (get<CountMismatch>(e) != nullptr)
reportError(std::move(e));
reportErrors(tryUnify(stack.back(), call->location, inferredFunctionType->retTypes, expectedRetType, CountMismatch::FunctionResult));
}
void visit(AstExprIndexName* indexName, ValueContext context)
{
TypeId leftType = lookupType(indexName->expr);
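The hunk above replaces one whole-signature subtype test with per-argument unification, so a mismatch can be reported at that argument's own location, and keeps a single pack-level unification only to surface arity (CountMismatch) errors. A minimal self-contained sketch of that shape, using toy enum "types" rather than Luau's real TypeId/Unifier API:

    // Toy illustration only: per-argument checks give per-argument error positions,
    // while one whole-list comparison is reserved for the arity complaint.
    #include <cstdio>
    #include <vector>

    enum class Ty { Number, String };

    static const char* show(Ty t) { return t == Ty::Number ? "number" : "string"; }

    int main()
    {
        std::vector<Ty> expected{Ty::Number, Ty::String}; // declared parameter types
        std::vector<Ty> given{Ty::Number, Ty::Number};    // inferred argument types at the call site

        for (size_t i = 0; i < expected.size() && i < given.size(); ++i)
            if (given[i] != expected[i])
                std::printf("argument #%zu: expected %s, got %s\n", i + 1, show(expected[i]), show(given[i]));

        if (given.size() != expected.size())
            std::printf("expected %zu arguments, but got %zu\n", expected.size(), given.size());
    }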
@ -939,14 +966,14 @@ struct TypeChecker2
|
|||
if (!norm)
|
||||
reportError(NormalizationTooComplex{}, indexName->indexLocation);
|
||||
|
||||
checkIndexTypeFromType(leftType, *norm, indexName->index.value, indexName->location);
|
||||
checkIndexTypeFromType(leftType, *norm, indexName->index.value, indexName->location, context);
|
||||
}
|
||||
|
||||
void visit(AstExprIndexExpr* indexExpr)
|
||||
void visit(AstExprIndexExpr* indexExpr, ValueContext context)
|
||||
{
|
||||
// TODO!
|
||||
visit(indexExpr->expr);
|
||||
visit(indexExpr->index);
|
||||
visit(indexExpr->expr, LValue);
|
||||
visit(indexExpr->index, RValue);
|
||||
}
|
||||
|
||||
void visit(AstExprFunction* fn)
|
||||
|
@ -986,14 +1013,14 @@ struct TypeChecker2
|
|||
for (const AstExprTable::Item& item : expr->items)
|
||||
{
|
||||
if (item.key)
|
||||
visit(item.key);
|
||||
visit(item.value);
|
||||
visit(item.key, LValue);
|
||||
visit(item.value, RValue);
|
||||
}
|
||||
}
|
||||
|
||||
void visit(AstExprUnary* expr)
|
||||
{
|
||||
visit(expr->expr);
|
||||
visit(expr->expr, RValue);
|
||||
|
||||
NotNull<Scope> scope = stack.back();
|
||||
TypeId operandType = lookupType(expr->expr);
|
||||
|
@ -1053,8 +1080,8 @@ struct TypeChecker2
|
|||
|
||||
TypeId visit(AstExprBinary* expr, void* overrideKey = nullptr)
|
||||
{
|
||||
visit(expr->left);
|
||||
visit(expr->right);
|
||||
visit(expr->left, LValue);
|
||||
visit(expr->right, LValue);
|
||||
|
||||
NotNull<Scope> scope = stack.back();
|
||||
|
||||
|
@ -1307,7 +1334,7 @@ struct TypeChecker2
|
|||
|
||||
void visit(AstExprTypeAssertion* expr)
|
||||
{
|
||||
visit(expr->expr);
|
||||
visit(expr->expr, RValue);
|
||||
visit(expr->annotation);
|
||||
|
||||
TypeId annotationType = lookupAnnotation(expr->annotation);
|
||||
|
@ -1326,16 +1353,16 @@ struct TypeChecker2
|
|||
void visit(AstExprIfElse* expr)
|
||||
{
|
||||
// TODO!
|
||||
visit(expr->condition);
|
||||
visit(expr->trueExpr);
|
||||
visit(expr->falseExpr);
|
||||
visit(expr->condition, RValue);
|
||||
visit(expr->trueExpr, RValue);
|
||||
visit(expr->falseExpr, RValue);
|
||||
}
|
||||
|
||||
void visit(AstExprError* expr)
|
||||
{
|
||||
// TODO!
|
||||
for (AstExpr* e : expr->expressions)
|
||||
visit(e);
|
||||
visit(e, RValue);
|
||||
}
|
||||
|
||||
/** Extract a TypeId for the first type of the provided pack.
|
||||
|
@ -1550,7 +1577,7 @@ struct TypeChecker2
|
|||
|
||||
void visit(AstTypeTypeof* ty)
|
||||
{
|
||||
visit(ty->expr);
|
||||
visit(ty->expr, RValue);
|
||||
}
|
||||
|
||||
void visit(AstTypeUnion* ty)
|
||||
|
@@ -1630,9 +1657,10 @@ struct TypeChecker2
}
template<typename TID>
ErrorVec tryUnify(NotNull<Scope> scope, const Location& location, TID subTy, TID superTy)
ErrorVec tryUnify(NotNull<Scope> scope, const Location& location, TID subTy, TID superTy, CountMismatch::Context context = CountMismatch::Arg)
{
Unifier u{NotNull{&normalizer}, Mode::Strict, scope, location, Covariant};
u.ctx = context;
u.useScopes = true;
u.tryUnify(subTy, superTy);

@@ -1658,7 +1686,7 @@ struct TypeChecker2
reportError(std::move(e));
}
void checkIndexTypeFromType(TypeId denormalizedTy, const NormalizedType& norm, const std::string& prop, const Location& location)
void checkIndexTypeFromType(TypeId tableTy, const NormalizedType& norm, const std::string& prop, const Location& location, ValueContext context)
{
bool foundOneProp = false;
std::vector<TypeId> typesMissingTheProp;

@@ -1723,9 +1751,11 @@ struct TypeChecker2
if (!typesMissingTheProp.empty())
{
if (foundOneProp)
reportError(TypeError{location, MissingUnionProperty{denormalizedTy, typesMissingTheProp, prop}});
reportError(MissingUnionProperty{tableTy, typesMissingTheProp, prop}, location);
else if (context == LValue)
reportError(CannotExtendTable{tableTy, CannotExtendTable::Property, prop}, location);
else
reportError(TypeError{location, UnknownProperty{denormalizedTy, prop}});
reportError(UnknownProperty{tableTy, prop}, location);
}
}
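For instance, with a sealed table typed `{ x: number }`, assigning to a missing property `t.y` (an LValue use) is now reported as an attempt to extend the table, while merely reading `t.y` (an RValue use) is still reported as an unknown property.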
@@ -32,16 +32,13 @@ LUAU_FASTINTVARIABLE(LuauTypeInferTypePackLoopLimit, 5000)
LUAU_FASTINTVARIABLE(LuauCheckRecursionLimit, 300)
LUAU_FASTINTVARIABLE(LuauVisitRecursionLimit, 500)
LUAU_FASTFLAG(LuauKnowsTheDataModel3)
LUAU_FASTFLAG(LuauTypeNormalization2)
LUAU_FASTFLAGVARIABLE(DebugLuauFreezeDuringUnification, false)
LUAU_FASTFLAGVARIABLE(LuauReturnAnyInsteadOfICE, false) // Eventually removed as false.
LUAU_FASTFLAGVARIABLE(DebugLuauSharedSelf, false)
LUAU_FASTFLAGVARIABLE(LuauUnknownAndNeverType, false)
LUAU_FASTFLAGVARIABLE(LuauTypeInferMissingFollows, false)
LUAU_FASTFLAGVARIABLE(LuauBinaryNeedsExpectedTypesToo, false)
LUAU_FASTFLAGVARIABLE(LuauNeverTypesAndOperatorsInference, false)
LUAU_FASTFLAGVARIABLE(LuauScopelessModule, false)
LUAU_FASTFLAGVARIABLE(LuauFollowInLvalueIndexCheck, false)
LUAU_FASTFLAGVARIABLE(LuauReturnsFromCallsitesAreNotWidened, false)
LUAU_FASTFLAGVARIABLE(LuauTryhardAnd, false)
LUAU_FASTFLAG(LuauInstantiateInSubtyping)

@@ -52,9 +49,8 @@ LUAU_FASTFLAGVARIABLE(LuauIntersectionTestForEquality, false)
LUAU_FASTFLAGVARIABLE(LuauImplicitElseRefinement, false)
LUAU_FASTFLAG(LuauNegatedClassTypes)
LUAU_FASTFLAGVARIABLE(LuauAllowIndexClassParameters, false)
LUAU_FASTFLAGVARIABLE(LuauDeclareClassPrototype, false)
LUAU_FASTFLAG(LuauUninhabitedSubAnything2)
LUAU_FASTFLAGVARIABLE(LuauCallableClasses, false)
LUAU_FASTFLAG(SupportTypeAliasGoToDeclaration)

namespace Luau
{
@ -333,12 +329,9 @@ ModulePtr TypeChecker::checkWithoutRecursionCheck(const SourceModule& module, Mo
|
|||
|
||||
prepareErrorsForDisplay(currentModule->errors);
|
||||
|
||||
if (FFlag::LuauTypeNormalization2)
|
||||
{
|
||||
// Clear the normalizer caches, since they contain types from the internal type surface
|
||||
normalizer.clearCaches();
|
||||
normalizer.arena = nullptr;
|
||||
}
|
||||
|
||||
currentModule->clonePublicInterface(builtinTypes, *iceHandler);
|
||||
|
||||
|
@ -512,7 +505,7 @@ void TypeChecker::checkBlockWithoutRecursionCheck(const ScopePtr& scope, const A
|
|||
prototype(scope, *typealias, subLevel);
|
||||
++subLevel;
|
||||
}
|
||||
else if (const auto& declaredClass = stat->as<AstStatDeclareClass>(); FFlag::LuauDeclareClassPrototype && declaredClass)
|
||||
else if (const auto& declaredClass = stat->as<AstStatDeclareClass>())
|
||||
{
|
||||
prototype(scope, *declaredClass);
|
||||
}
|
||||
|
@ -1137,8 +1130,12 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatLocal& local)
|
|||
const Name name{local.vars.data[i]->name.value};
|
||||
|
||||
if (ModulePtr module = resolver->getModule(moduleInfo->name))
|
||||
{
|
||||
scope->importedTypeBindings[name] =
|
||||
FFlag::LuauScopelessModule ? module->exportedTypeBindings : module->getModuleScope()->exportedTypeBindings;
|
||||
if (FFlag::SupportTypeAliasGoToDeclaration)
|
||||
scope->importedModules[name] = moduleInfo->name;
|
||||
}
|
||||
|
||||
// In non-strict mode we force the module type on the variable, in strict mode it is already unified
|
||||
if (isNonstrictMode())
|
||||
|
@ -1622,6 +1619,8 @@ void TypeChecker::prototype(const ScopePtr& scope, const AstStatTypeAlias& typea
|
|||
bindingsMap[name] = {std::move(generics), std::move(genericPacks), ty};
|
||||
|
||||
scope->typeAliasLocations[name] = typealias.location;
|
||||
if (FFlag::SupportTypeAliasGoToDeclaration)
|
||||
scope->typeAliasNameLocations[name] = typealias.nameLocation;
|
||||
}
|
||||
}
|
||||
else
|
||||
|
@ -1640,12 +1639,13 @@ void TypeChecker::prototype(const ScopePtr& scope, const AstStatTypeAlias& typea
|
|||
bindingsMap[name] = {std::move(generics), std::move(genericPacks), ty};
|
||||
|
||||
scope->typeAliasLocations[name] = typealias.location;
|
||||
if (FFlag::SupportTypeAliasGoToDeclaration)
|
||||
scope->typeAliasNameLocations[name] = typealias.nameLocation;
|
||||
}
|
||||
}
|
||||
|
||||
void TypeChecker::prototype(const ScopePtr& scope, const AstStatDeclareClass& declaredClass)
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauDeclareClassPrototype);
|
||||
std::optional<TypeId> superTy = FFlag::LuauNegatedClassTypes ? std::make_optional(builtinTypes->classType) : std::nullopt;
|
||||
if (declaredClass.superName)
|
||||
{
|
||||
|
@ -1684,8 +1684,6 @@ void TypeChecker::prototype(const ScopePtr& scope, const AstStatDeclareClass& de
|
|||
|
||||
void TypeChecker::check(const ScopePtr& scope, const AstStatDeclareClass& declaredClass)
|
||||
{
|
||||
if (FFlag::LuauDeclareClassPrototype)
|
||||
{
|
||||
Name className(declaredClass.name.value);
|
||||
|
||||
// Don't bother checking if the class definition was incorrect
|
||||
|
@ -1757,96 +1755,6 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatDeclareClass& declar
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
std::optional<TypeId> superTy = FFlag::LuauNegatedClassTypes ? std::make_optional(builtinTypes->classType) : std::nullopt;
|
||||
if (declaredClass.superName)
|
||||
{
|
||||
Name superName = Name(declaredClass.superName->value);
|
||||
std::optional<TypeFun> lookupType = scope->lookupType(superName);
|
||||
|
||||
if (!lookupType)
|
||||
{
|
||||
reportError(declaredClass.location, UnknownSymbol{superName, UnknownSymbol::Type});
|
||||
return;
|
||||
}
|
||||
|
||||
// We don't have generic classes, so this assertion _should_ never be hit.
|
||||
LUAU_ASSERT(lookupType->typeParams.size() == 0 && lookupType->typePackParams.size() == 0);
|
||||
superTy = lookupType->type;
|
||||
|
||||
if (!get<ClassType>(follow(*superTy)))
|
||||
{
|
||||
reportError(declaredClass.location, GenericError{format("Cannot use non-class type '%s' as a superclass of class '%s'",
|
||||
superName.c_str(), declaredClass.name.value)});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
Name className(declaredClass.name.value);
|
||||
|
||||
TypeId classTy = addType(ClassType(className, {}, superTy, std::nullopt, {}, {}, currentModuleName));
|
||||
|
||||
ClassType* ctv = getMutable<ClassType>(classTy);
|
||||
TypeId metaTy = addType(TableType{TableState::Sealed, scope->level});
|
||||
TableType* metatable = getMutable<TableType>(metaTy);
|
||||
|
||||
ctv->metatable = metaTy;
|
||||
|
||||
scope->exportedTypeBindings[className] = TypeFun{{}, classTy};
|
||||
|
||||
for (const AstDeclaredClassProp& prop : declaredClass.props)
|
||||
{
|
||||
Name propName(prop.name.value);
|
||||
TypeId propTy = resolveType(scope, *prop.ty);
|
||||
|
||||
bool assignToMetatable = isMetamethod(propName);
|
||||
Luau::ClassType::Props& assignTo = assignToMetatable ? metatable->props : ctv->props;
|
||||
|
||||
// Function types always take 'self', but this isn't reflected in the
|
||||
// parsed annotation. Add it here.
|
||||
if (prop.isMethod)
|
||||
{
|
||||
if (FunctionType* ftv = getMutable<FunctionType>(propTy))
|
||||
{
|
||||
ftv->argNames.insert(ftv->argNames.begin(), FunctionArgument{"self", {}});
|
||||
ftv->argTypes = addTypePack(TypePack{{classTy}, ftv->argTypes});
|
||||
ftv->hasSelf = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (assignTo.count(propName) == 0)
|
||||
{
|
||||
assignTo[propName] = {propTy};
|
||||
}
|
||||
else
|
||||
{
|
||||
TypeId currentTy = assignTo[propName].type;
|
||||
|
||||
// We special-case this logic to keep the intersection flat; otherwise we
|
||||
// would create a ton of nested intersection types.
|
||||
if (const IntersectionType* itv = get<IntersectionType>(currentTy))
|
||||
{
|
||||
std::vector<TypeId> options = itv->parts;
|
||||
options.push_back(propTy);
|
||||
TypeId newItv = addType(IntersectionType{std::move(options)});
|
||||
|
||||
assignTo[propName] = {newItv};
|
||||
}
|
||||
else if (get<FunctionType>(currentTy))
|
||||
{
|
||||
TypeId intersection = addType(IntersectionType{{currentTy, propTy}});
|
||||
|
||||
assignTo[propName] = {intersection};
|
||||
}
|
||||
else
|
||||
{
|
||||
reportError(declaredClass.location, GenericError{format("Cannot overload non-function class member '%s'", propName.c_str())});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, const AstStatDeclareFunction& global)
|
||||
|
@ -3364,7 +3272,6 @@ TypeId TypeChecker::checkLValueBinding(const ScopePtr& scope, const AstExprIndex
|
|||
|
||||
TypeId indexType = checkExpr(scope, *expr.index).type;
|
||||
|
||||
if (FFlag::LuauFollowInLvalueIndexCheck)
|
||||
exprType = follow(exprType);
|
||||
|
||||
if (get<AnyType>(exprType) || get<ErrorType>(exprType))
|
||||
|
@ -4282,7 +4189,7 @@ std::optional<WithPredicate<TypePackId>> TypeChecker::checkCallOverload(const Sc
|
|||
{
|
||||
callTy = getIndexTypeFromType(scope, mttv->metatable, "__call", expr.func->location, /* addErrors= */ false);
|
||||
}
|
||||
else if (const ClassType* ctv = get<ClassType>(fn); FFlag::LuauCallableClasses && ctv && ctv->metatable)
|
||||
else if (const ClassType* ctv = get<ClassType>(fn); ctv && ctv->metatable)
|
||||
{
|
||||
callTy = getIndexTypeFromType(scope, *ctv->metatable, "__call", expr.func->location, /* addErrors= */ false);
|
||||
}
|
||||
|
@ -4479,7 +4386,7 @@ void TypeChecker::reportOverloadResolutionError(const ScopePtr& scope, const Ast
|
|||
std::string s;
|
||||
for (size_t i = 0; i < overloadTypes.size(); ++i)
|
||||
{
|
||||
TypeId overload = FFlag::LuauTypeInferMissingFollows ? follow(overloadTypes[i]) : overloadTypes[i];
|
||||
TypeId overload = follow(overloadTypes[i]);
|
||||
Unifier state = mkUnifier(scope, expr.location);
|
||||
|
||||
// Unify return types
|
||||
|
@ -4861,7 +4768,7 @@ TypePackId TypeChecker::anyifyModuleReturnTypePackGenerics(TypePackId tp)
|
|||
|
||||
if (const VariadicTypePack* vtp = get<VariadicTypePack>(tp))
|
||||
{
|
||||
TypeId ty = FFlag::LuauTypeInferMissingFollows ? follow(vtp->ty) : vtp->ty;
|
||||
TypeId ty = follow(vtp->ty);
|
||||
return get<GenericType>(ty) ? anyTypePack : tp;
|
||||
}
|
||||
|
||||
|
@ -6105,11 +6012,11 @@ void TypeChecker::resolve(const EqPredicate& eqP, RefinementMap& refis, const Sc
|
|||
if (optionIsSubtype && !targetIsSubtype)
|
||||
return option;
|
||||
else if (!optionIsSubtype && targetIsSubtype)
|
||||
return FFlag::LuauTypeInferMissingFollows ? follow(eqP.type) : eqP.type;
|
||||
return follow(eqP.type);
|
||||
else if (!optionIsSubtype && !targetIsSubtype)
|
||||
return nope;
|
||||
else if (optionIsSubtype && targetIsSubtype)
|
||||
return FFlag::LuauTypeInferMissingFollows ? follow(eqP.type) : eqP.type;
|
||||
return follow(eqP.type);
|
||||
}
|
||||
else
|
||||
{
|
||||
|
|
|
@@ -6,8 +6,6 @@
#include <stdexcept>
LUAU_FASTFLAGVARIABLE(LuauTxnLogTypePackIterator, false)
namespace Luau
{

@@ -62,8 +60,8 @@ TypePackIterator::TypePackIterator(TypePackId typePack)
}
TypePackIterator::TypePackIterator(TypePackId typePack, const TxnLog* log)
: currentTypePack(FFlag::LuauTxnLogTypePackIterator ? log->follow(typePack) : follow(typePack))
, tp(FFlag::LuauTxnLogTypePackIterator ? log->get<TypePack>(currentTypePack) : get<TypePack>(currentTypePack))
: currentTypePack(log->follow(typePack))
, tp(log->get<TypePack>(currentTypePack))
, currentIndex(0)
, log(log)
{
|
|
|
@ -4,6 +4,7 @@
|
|||
#include "Luau/Common.h"
|
||||
#include "Luau/Error.h"
|
||||
#include "Luau/RecursionCounter.h"
|
||||
#include "Luau/VisitType.h"
|
||||
|
||||
#include <numeric>
|
||||
#include <deque>
|
||||
|
@ -18,24 +19,7 @@ namespace Luau
|
|||
namespace
|
||||
{
|
||||
|
||||
struct RecursionGuard : RecursionLimiter
|
||||
{
|
||||
std::deque<const void*>* seen;
|
||||
|
||||
RecursionGuard(int* count, int limit, std::deque<const void*>* seen)
|
||||
: RecursionLimiter(count, limit)
|
||||
, seen(seen)
|
||||
{
|
||||
// count has been incremented, which should imply that seen has already had an element pushed in.
|
||||
LUAU_ASSERT(size_t(*count) == seen->size());
|
||||
}
|
||||
|
||||
~RecursionGuard()
|
||||
{
|
||||
LUAU_ASSERT(!seen->empty()); // It is UB to pop_back() on an empty deque.
|
||||
seen->pop_back();
|
||||
}
|
||||
};
|
||||
using detail::ReductionContext;
|
||||
|
||||
template<typename A, typename B, typename Thing>
|
||||
std::pair<const A*, const B*> get2(const Thing& one, const Thing& two)
|
||||
|
@ -51,15 +35,11 @@ struct TypeReducer
|
|||
NotNull<BuiltinTypes> builtinTypes;
|
||||
NotNull<InternalErrorReporter> handle;
|
||||
|
||||
std::unordered_map<TypeId, TypeId> copies;
|
||||
std::deque<const void*> seen;
|
||||
int depth = 0;
|
||||
DenseHashMap<TypeId, ReductionContext<TypeId>>* memoizedTypes;
|
||||
DenseHashMap<TypePackId, ReductionContext<TypePackId>>* memoizedTypePacks;
|
||||
DenseHashSet<TypeId>* cyclicTypes;
|
||||
|
||||
// When we encounter _any type_ that which is usually mutated in-place, we need to not cache the result.
|
||||
// e.g. `'a & {} T` may have an upper bound constraint `{}` placed upon `'a`, but this constraint was not
|
||||
// known when we decided to reduce this intersection type. By not caching, we'll always be forced to perform
|
||||
// the reduction calculus over again.
|
||||
bool cacheOk = true;
|
||||
int depth = 0;
|
||||
|
||||
TypeId reduce(TypeId ty);
|
||||
TypePackId reduce(TypePackId tp);
|
||||
|
@ -70,62 +50,73 @@ struct TypeReducer
|
|||
TypeId functionType(TypeId ty);
|
||||
TypeId negationType(TypeId ty);
|
||||
|
||||
RecursionGuard guard(TypeId ty);
|
||||
RecursionGuard guard(TypePackId tp);
|
||||
bool isIrreducible(TypeId ty);
|
||||
bool isIrreducible(TypePackId tp);
|
||||
|
||||
void checkCacheable(TypeId ty);
|
||||
void checkCacheable(TypePackId tp);
|
||||
TypeId memoize(TypeId ty, TypeId reducedTy);
|
||||
TypePackId memoize(TypePackId tp, TypePackId reducedTp);
|
||||
|
||||
// It's either cyclic with no memoized result, so we should terminate, or
|
||||
// there is a memoized result but one that's being reduced top-down, so
|
||||
// we need to return the root of that memoized result to tighten up things.
|
||||
TypeId memoizedOr(TypeId ty) const;
|
||||
TypePackId memoizedOr(TypePackId tp) const;
|
||||
|
||||
using BinaryFold = std::optional<TypeId> (TypeReducer::*)(TypeId, TypeId);
|
||||
using UnaryFold = TypeId (TypeReducer::*)(TypeId);
|
||||
|
||||
template<typename T>
|
||||
LUAU_NOINLINE std::pair<TypeId, T*> copy(TypeId ty, const T* t)
|
||||
{
|
||||
if (auto it = copies.find(ty); it != copies.end())
|
||||
return {it->second, getMutable<T>(it->second)};
|
||||
ty = follow(ty);
|
||||
|
||||
if (auto ctx = memoizedTypes->find(ty))
|
||||
return {ctx->type, getMutable<T>(ctx->type)};
|
||||
|
||||
TypeId copiedTy = arena->addType(*t);
|
||||
copies[ty] = copiedTy;
|
||||
(*memoizedTypes)[ty] = {copiedTy, false};
|
||||
(*memoizedTypes)[copiedTy] = {copiedTy, false};
|
||||
return {copiedTy, getMutable<T>(copiedTy)};
|
||||
}
|
||||
|
||||
using Folder = std::optional<TypeId> (TypeReducer::*)(TypeId, TypeId);
|
||||
|
||||
template<typename T, typename Iter>
|
||||
void foldl_impl(Iter it, Iter endIt, Folder f, NotNull<std::vector<TypeId>> result)
|
||||
void foldl_impl(Iter it, Iter endIt, BinaryFold f, std::vector<TypeId>* result, bool* didReduce)
|
||||
{
|
||||
RecursionLimiter rl{&depth, FInt::LuauTypeReductionRecursionLimit};
|
||||
|
||||
while (it != endIt)
|
||||
{
|
||||
bool replaced = false;
|
||||
TypeId currentTy = reduce(*it);
|
||||
RecursionGuard rg = guard(*it);
|
||||
TypeId right = reduce(*it);
|
||||
*didReduce |= right != follow(*it);
|
||||
|
||||
// We're hitting a case where the `currentTy` returned a type that's the same as `T`.
|
||||
// e.g. `(string?) & ~(false | nil)` became `(string?) & (~false & ~nil)` but the current iterator we're consuming doesn't know this.
|
||||
// We will need to recurse and traverse that first.
|
||||
if (auto t = get<T>(currentTy))
|
||||
if (auto t = get<T>(right))
|
||||
{
|
||||
foldl_impl<T>(begin(t), end(t), f, result);
|
||||
foldl_impl<T>(begin(t), end(t), f, result, didReduce);
|
||||
++it;
|
||||
continue;
|
||||
}
|
||||
|
||||
bool replaced = false;
|
||||
auto resultIt = result->begin();
|
||||
while (resultIt != result->end())
|
||||
{
|
||||
TypeId& ty = *resultIt;
|
||||
|
||||
std::optional<TypeId> reduced = (this->*f)(ty, currentTy);
|
||||
if (reduced && replaced)
|
||||
TypeId left = *resultIt;
|
||||
if (left == right)
|
||||
{
|
||||
// We want to erase any other elements that occurs after the first replacement too.
|
||||
// e.g. `"a" | "b" | string` where `"a"` and `"b"` is in the `result` vector, then `string` replaces both `"a"` and `"b"`.
|
||||
// If we don't erase redundant elements, `"b"` may be kept or be replaced by `string`, leaving us with `string | string`.
|
||||
resultIt = result->erase(resultIt);
|
||||
replaced = true;
|
||||
++resultIt;
|
||||
continue;
|
||||
}
|
||||
else if (reduced && !replaced)
|
||||
|
||||
std::optional<TypeId> reduced = (this->*f)(left, right);
|
||||
if (reduced)
|
||||
{
|
||||
*resultIt = *reduced;
|
||||
++resultIt;
|
||||
replaced = true;
|
||||
ty = *reduced;
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -135,21 +126,65 @@ struct TypeReducer
|
|||
}
|
||||
|
||||
if (!replaced)
|
||||
result->push_back(currentTy);
|
||||
result->push_back(right);
|
||||
|
||||
*didReduce |= replaced;
|
||||
++it;
|
||||
}
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
TypeId flatten(std::vector<TypeId>&& types)
|
||||
{
|
||||
if (types.size() == 1)
|
||||
return types[0];
|
||||
else
|
||||
return arena->addType(T{std::move(types)});
|
||||
}
|
||||
|
||||
template<typename T, typename Iter>
|
||||
TypeId foldl(Iter it, Iter endIt, Folder f)
|
||||
TypeId foldl(Iter it, Iter endIt, std::optional<TypeId> ty, BinaryFold f)
|
||||
{
|
||||
std::vector<TypeId> result;
|
||||
foldl_impl<T>(it, endIt, f, NotNull{&result});
|
||||
if (result.size() == 1)
|
||||
return result[0];
|
||||
bool didReduce = false;
|
||||
foldl_impl<T>(it, endIt, f, &result, &didReduce);
|
||||
if (!didReduce && ty)
|
||||
return *ty;
|
||||
else
|
||||
return arena->addType(T{std::move(result)});
|
||||
{
|
||||
// If we've done any reduction, then we'll need to reduce it again, e.g.
|
||||
// `"a" | "b" | string` is reduced into `string | string`, which is then reduced into `string`.
|
||||
return reduce(flatten<T>(std::move(result)));
|
||||
}
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
TypeId apply(BinaryFold f, TypeId left, TypeId right)
|
||||
{
|
||||
left = follow(left);
|
||||
right = follow(right);
|
||||
|
||||
if (get<T>(left) || get<T>(right))
|
||||
{
|
||||
std::vector<TypeId> types{left, right};
|
||||
return foldl<T>(begin(types), end(types), std::nullopt, f);
|
||||
}
|
||||
else if (auto reduced = (this->*f)(left, right))
|
||||
return *reduced;
|
||||
else
|
||||
return arena->addType(T{{left, right}});
|
||||
}
|
||||
|
||||
template<typename Into, typename Over>
|
||||
TypeId distribute(TypeIterator<Over> it, TypeIterator<Over> endIt, BinaryFold f, TypeId ty)
|
||||
{
|
||||
std::vector<TypeId> result;
|
||||
while (it != endIt)
|
||||
{
|
||||
result.push_back(apply<Into>(f, *it, ty));
|
||||
++it;
|
||||
}
|
||||
return flatten<Over>(std::move(result));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -157,42 +192,48 @@ TypeId TypeReducer::reduce(TypeId ty)
|
|||
{
|
||||
ty = follow(ty);
|
||||
|
||||
if (std::find(seen.begin(), seen.end(), ty) != seen.end())
|
||||
return ty;
|
||||
if (auto ctx = memoizedTypes->find(ty); ctx && ctx->irreducible)
|
||||
return ctx->type;
|
||||
else if (auto cyclicTy = cyclicTypes->find(ty))
|
||||
return *cyclicTy;
|
||||
|
||||
RecursionGuard rg = guard(ty);
|
||||
checkCacheable(ty);
|
||||
RecursionLimiter rl{&depth, FInt::LuauTypeReductionRecursionLimit};
|
||||
|
||||
TypeId result = nullptr;
|
||||
if (auto i = get<IntersectionType>(ty))
|
||||
return foldl<IntersectionType>(begin(i), end(i), &TypeReducer::intersectionType);
|
||||
result = foldl<IntersectionType>(begin(i), end(i), ty, &TypeReducer::intersectionType);
|
||||
else if (auto u = get<UnionType>(ty))
|
||||
return foldl<UnionType>(begin(u), end(u), &TypeReducer::unionType);
|
||||
result = foldl<UnionType>(begin(u), end(u), ty, &TypeReducer::unionType);
|
||||
else if (get<TableType>(ty) || get<MetatableType>(ty))
|
||||
return tableType(ty);
|
||||
result = tableType(ty);
|
||||
else if (get<FunctionType>(ty))
|
||||
return functionType(ty);
|
||||
else if (auto n = get<NegationType>(ty))
|
||||
return negationType(follow(n->ty));
|
||||
result = functionType(ty);
|
||||
else if (get<NegationType>(ty))
|
||||
result = negationType(ty);
|
||||
else
|
||||
return ty;
|
||||
result = ty;
|
||||
|
||||
return memoize(ty, result);
|
||||
}
|
||||
|
||||
TypePackId TypeReducer::reduce(TypePackId tp)
|
||||
{
|
||||
tp = follow(tp);
|
||||
|
||||
if (std::find(seen.begin(), seen.end(), tp) != seen.end())
|
||||
return tp;
|
||||
if (auto ctx = memoizedTypePacks->find(tp); ctx && ctx->irreducible)
|
||||
return ctx->type;
|
||||
|
||||
RecursionGuard rg = guard(tp);
|
||||
checkCacheable(tp);
|
||||
RecursionLimiter rl{&depth, FInt::LuauTypeReductionRecursionLimit};
|
||||
|
||||
bool didReduce = false;
|
||||
TypePackIterator it = begin(tp);
|
||||
|
||||
std::vector<TypeId> head;
|
||||
while (it != end(tp))
|
||||
{
|
||||
head.push_back(reduce(*it));
|
||||
TypeId reducedTy = reduce(*it);
|
||||
head.push_back(reducedTy);
|
||||
didReduce |= follow(*it) != follow(reducedTy);
|
||||
++it;
|
||||
}
|
||||
|
||||
|
@ -200,10 +241,22 @@ TypePackId TypeReducer::reduce(TypePackId tp)
|
|||
if (tail)
|
||||
{
|
||||
if (auto vtp = get<VariadicTypePack>(follow(*it.tail())))
|
||||
tail = arena->addTypePack(VariadicTypePack{reduce(vtp->ty), vtp->hidden});
|
||||
{
|
||||
TypeId reducedTy = reduce(vtp->ty);
|
||||
if (follow(vtp->ty) != follow(reducedTy))
|
||||
{
|
||||
tail = arena->addTypePack(VariadicTypePack{reducedTy, vtp->hidden});
|
||||
didReduce = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return arena->addTypePack(TypePack{std::move(head), tail});
|
||||
if (!didReduce)
|
||||
return memoize(tp, tp);
|
||||
else if (head.empty() && tail)
|
||||
return memoize(tp, *tail);
|
||||
else
|
||||
return memoize(tp, arena->addTypePack(TypePack{std::move(head), tail}));
|
||||
}
|
||||
|
||||
std::optional<TypeId> TypeReducer::intersectionType(TypeId left, TypeId right)
|
||||
|
@@ -236,18 +289,7 @@ std::optional<TypeId> TypeReducer::intersectionType(TypeId left, TypeId right)
else if (get<ErrorType>(right))
return std::nullopt; // T & error ~ T & error
else if (auto ut = get<UnionType>(left))
{
std::vector<TypeId> options;
for (TypeId option : ut)
{
if (auto result = intersectionType(option, right))
options.push_back(*result);
else
options.push_back(arena->addType(IntersectionType{{option, right}}));
}
return foldl<UnionType>(begin(options), end(options), &TypeReducer::unionType); // (A | B) & T ~ (A & T) | (B & T)
}
return reduce(distribute<IntersectionType>(begin(ut), end(ut), &TypeReducer::intersectionType, right)); // (A | B) & T ~ (A & T) | (B & T)
else if (get<UnionType>(right))
return intersectionType(right, left); // T & (A | B) ~ (A | B) & T
else if (auto [p1, p2] = get2<PrimitiveType, PrimitiveType>(left, right); p1 && p2)
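The `distribute` call above applies the distributive law of intersection over union; a worked instance in the same informal `~` notation the comments use (illustrative only):

    (number | string) & ~string
      ~ (number & ~string) | (string & ~string)
      ~ number | never
      ~ number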
@ -294,14 +336,7 @@ std::optional<TypeId> TypeReducer::intersectionType(TypeId left, TypeId right)
|
|||
return builtinTypes->neverType; // Base & Unrelated ~ never
|
||||
}
|
||||
else if (auto [f1, f2] = get2<FunctionType, FunctionType>(left, right); f1 && f2)
|
||||
{
|
||||
if (std::find(seen.begin(), seen.end(), left) != seen.end())
|
||||
return std::nullopt;
|
||||
else if (std::find(seen.begin(), seen.end(), right) != seen.end())
|
||||
return std::nullopt;
|
||||
|
||||
return std::nullopt; // TODO
|
||||
}
|
||||
else if (auto [t1, t2] = get2<TableType, TableType>(left, right); t1 && t2)
|
||||
{
|
||||
if (t1->state == TableState::Free || t2->state == TableState::Free)
|
||||
|
@ -309,10 +344,10 @@ std::optional<TypeId> TypeReducer::intersectionType(TypeId left, TypeId right)
|
|||
else if (t1->state == TableState::Generic || t2->state == TableState::Generic)
|
||||
return std::nullopt; // '{ x: T } & { x: U } ~ '{ x: T } & { x: U }
|
||||
|
||||
if (std::find(seen.begin(), seen.end(), left) != seen.end())
|
||||
return std::nullopt;
|
||||
else if (std::find(seen.begin(), seen.end(), right) != seen.end())
|
||||
return std::nullopt;
|
||||
if (cyclicTypes->find(left))
|
||||
return std::nullopt; // (t1 where t1 = { p: t1 }) & {} ~ t1 & {}
|
||||
else if (cyclicTypes->find(right))
|
||||
return std::nullopt; // {} & (t1 where t1 = { p: t1 }) ~ {} & t1
|
||||
|
||||
TypeId resultTy = arena->addType(TableType{});
|
||||
TableType* table = getMutable<TableType>(resultTy);
|
||||
|
@ -324,8 +359,7 @@ std::optional<TypeId> TypeReducer::intersectionType(TypeId left, TypeId right)
|
|||
// even if we have the corresponding property in the other one.
|
||||
if (auto other = t2->props.find(name); other != t2->props.end())
|
||||
{
|
||||
std::vector<TypeId> parts{prop.type, other->second.type};
|
||||
TypeId propTy = foldl<IntersectionType>(begin(parts), end(parts), &TypeReducer::intersectionType);
|
||||
TypeId propTy = apply<IntersectionType>(&TypeReducer::intersectionType, prop.type, other->second.type);
|
||||
if (get<NeverType>(propTy))
|
||||
return builtinTypes->neverType; // { p : string } & { p : number } ~ { p : string & number } ~ { p : never } ~ never
|
||||
else
|
||||
|
@ -340,27 +374,33 @@ std::optional<TypeId> TypeReducer::intersectionType(TypeId left, TypeId right)
|
|||
// TODO: And vice versa, t2 properties against t1 indexer if it exists,
|
||||
// even if we have the corresponding property in the other one.
|
||||
if (!t1->props.count(name))
|
||||
table->props[name] = prop; // {} & { p : string } ~ { p : string }
|
||||
table->props[name] = {reduce(prop.type)}; // {} & { p : string & string } ~ { p : string }
|
||||
}
|
||||
|
||||
if (t1->indexer && t2->indexer)
|
||||
{
|
||||
std::vector<TypeId> keyParts{t1->indexer->indexType, t2->indexer->indexType};
|
||||
TypeId keyTy = foldl<IntersectionType>(begin(keyParts), end(keyParts), &TypeReducer::intersectionType);
|
||||
TypeId keyTy = apply<IntersectionType>(&TypeReducer::intersectionType, t1->indexer->indexType, t2->indexer->indexType);
|
||||
if (get<NeverType>(keyTy))
|
||||
return builtinTypes->neverType; // { [string]: _ } & { [number]: _ } ~ { [string & number]: _ } ~ { [never]: _ } ~ never
|
||||
return std::nullopt; // { [string]: _ } & { [number]: _ } ~ { [string]: _ } & { [number]: _ }
|
||||
|
||||
std::vector<TypeId> valueParts{t1->indexer->indexResultType, t2->indexer->indexResultType};
|
||||
TypeId valueTy = foldl<IntersectionType>(begin(valueParts), end(valueParts), &TypeReducer::intersectionType);
|
||||
TypeId valueTy = apply<IntersectionType>(&TypeReducer::intersectionType, t1->indexer->indexResultType, t2->indexer->indexResultType);
|
||||
if (get<NeverType>(valueTy))
|
||||
return builtinTypes->neverType; // { [_]: string } & { [_]: number } ~ { [_]: string & number } ~ { [_]: never } ~ never
|
||||
|
||||
table->indexer = TableIndexer{keyTy, valueTy};
|
||||
}
|
||||
else if (t1->indexer)
|
||||
table->indexer = t1->indexer; // { [number]: boolean } & { p : string } ~ { p : string, [number]: boolean }
|
||||
{
|
||||
TypeId keyTy = reduce(t1->indexer->indexType);
|
||||
TypeId valueTy = reduce(t1->indexer->indexResultType);
|
||||
table->indexer = TableIndexer{keyTy, valueTy}; // { [number]: boolean } & { p : string } ~ { p : string, [number]: boolean }
|
||||
}
|
||||
else if (t2->indexer)
|
||||
table->indexer = t2->indexer; // { p : string } & { [number]: boolean } ~ { p : string, [number]: boolean }
|
||||
{
|
||||
TypeId keyTy = reduce(t2->indexer->indexType);
|
||||
TypeId valueTy = reduce(t2->indexer->indexResultType);
|
||||
table->indexer = TableIndexer{keyTy, valueTy}; // { p : string } & { [number]: boolean } ~ { p : string, [number]: boolean }
|
||||
}
|
||||
|
||||
return resultTy;
|
||||
}
|
||||
|
@ -506,22 +546,7 @@ std::optional<TypeId> TypeReducer::unionType(TypeId left, TypeId right)
|
|||
return std::nullopt; // Base | Unrelated ~ Base | Unrelated
|
||||
}
|
||||
else if (auto [nt, it] = get2<NegationType, IntersectionType>(left, right); nt && it)
|
||||
{
|
||||
std::vector<TypeId> parts;
|
||||
for (TypeId option : it)
|
||||
{
|
||||
if (auto result = unionType(left, option))
|
||||
parts.push_back(*result);
|
||||
else
|
||||
{
|
||||
// TODO: does there exist a reduced form such that `~T | A` hasn't already reduced it, if `A & B` is irreducible?
|
||||
// I want to say yes, but I can't generate a case that hits this code path.
|
||||
parts.push_back(arena->addType(UnionType{{left, option}}));
|
||||
}
|
||||
}
|
||||
|
||||
return foldl<IntersectionType>(begin(parts), end(parts), &TypeReducer::intersectionType); // ~T | (A & B) ~ (~T | A) & (~T | B)
|
||||
}
|
||||
return reduce(distribute<UnionType>(begin(it), end(it), &TypeReducer::unionType, left)); // ~T | (A & B) ~ (~T | A) & (~T | B)
|
||||
else if (auto [it, nt] = get2<IntersectionType, NegationType>(left, right); it && nt)
|
||||
return unionType(right, left); // (A & B) | ~T ~ ~T | (A & B)
|
||||
else if (auto [nl, nr] = get2<NegationType, NegationType>(left, right); nl && nr)
|
||||
|
@ -628,8 +653,6 @@ std::optional<TypeId> TypeReducer::unionType(TypeId left, TypeId right)
|
|||
|
||||
TypeId TypeReducer::tableType(TypeId ty)
|
||||
{
|
||||
RecursionGuard rg = guard(ty);
|
||||
|
||||
if (auto mt = get<MetatableType>(ty))
|
||||
{
|
||||
auto [copiedTy, copied] = copy(ty, mt);
|
||||
|
@ -639,15 +662,30 @@ TypeId TypeReducer::tableType(TypeId ty)
|
|||
}
|
||||
else if (auto tt = get<TableType>(ty))
|
||||
{
|
||||
// Because of `typeof()`, we need to preserve pointer identity of free/unsealed tables so that
|
||||
// all mutations that occurs on this will be applied without leaking the implementation details.
|
||||
// As a result, we'll just use the type instead of cloning it if it's free/unsealed.
|
||||
//
|
||||
// We could choose to do in-place reductions here, but to be on the safer side, I propose that we do not.
|
||||
if (tt->state == TableState::Free || tt->state == TableState::Unsealed)
|
||||
return ty;
|
||||
|
||||
auto [copiedTy, copied] = copy(ty, tt);
|
||||
|
||||
for (auto& [name, prop] : copied->props)
|
||||
prop.type = reduce(prop.type);
|
||||
|
||||
if (auto& indexer = copied->indexer)
|
||||
{
|
||||
indexer->indexType = reduce(indexer->indexType);
|
||||
indexer->indexResultType = reduce(indexer->indexResultType);
|
||||
TypeId propTy = reduce(prop.type);
|
||||
if (get<NeverType>(propTy))
|
||||
return builtinTypes->neverType;
|
||||
else
|
||||
prop.type = propTy;
|
||||
}
|
||||
|
||||
if (copied->indexer)
|
||||
{
|
||||
TypeId keyTy = reduce(copied->indexer->indexType);
|
||||
TypeId valueTy = reduce(copied->indexer->indexResultType);
|
||||
copied->indexer = TableIndexer{keyTy, valueTy};
|
||||
}
|
||||
|
||||
for (TypeId& ty : copied->instantiatedTypeParams)
|
||||
|
@ -659,16 +697,14 @@ TypeId TypeReducer::tableType(TypeId ty)
|
|||
return copiedTy;
|
||||
}
|
||||
else
|
||||
handle->ice("Unexpected type in TypeReducer::tableType");
|
||||
handle->ice("TypeReducer::tableType expects a TableType or MetatableType");
|
||||
}
|
||||
|
||||
TypeId TypeReducer::functionType(TypeId ty)
|
||||
{
|
||||
RecursionGuard rg = guard(ty);
|
||||
|
||||
const FunctionType* f = get<FunctionType>(ty);
|
||||
if (!f)
|
||||
handle->ice("TypeReducer::reduce expects a FunctionType");
|
||||
handle->ice("TypeReducer::functionType expects a FunctionType");
|
||||
|
||||
// TODO: once we have bounded quantification, we need to be able to reduce the generic bounds.
|
||||
auto [copiedTy, copied] = copy(ty, f);
|
||||
|
@ -679,140 +715,238 @@ TypeId TypeReducer::functionType(TypeId ty)
|
|||
|
||||
TypeId TypeReducer::negationType(TypeId ty)
|
||||
{
|
||||
RecursionGuard rg = guard(ty);
|
||||
const NegationType* n = get<NegationType>(ty);
|
||||
if (!n)
|
||||
return arena->addType(NegationType{ty});
|
||||
|
||||
if (auto nn = get<NegationType>(ty))
|
||||
if (auto nn = get<NegationType>(n->ty))
|
||||
return nn->ty; // ~~T ~ T
|
||||
else if (get<NeverType>(ty))
|
||||
else if (get<NeverType>(n->ty))
|
||||
return builtinTypes->unknownType; // ~never ~ unknown
|
||||
else if (get<UnknownType>(ty))
|
||||
else if (get<UnknownType>(n->ty))
|
||||
return builtinTypes->neverType; // ~unknown ~ never
|
||||
else if (get<AnyType>(ty))
|
||||
else if (get<AnyType>(n->ty))
|
||||
return builtinTypes->anyType; // ~any ~ any
|
||||
else if (auto ni = get<IntersectionType>(ty))
|
||||
else if (auto ni = get<IntersectionType>(n->ty))
|
||||
{
|
||||
std::vector<TypeId> options;
|
||||
for (TypeId part : ni)
|
||||
options.push_back(negationType(part));
|
||||
return foldl<UnionType>(begin(options), end(options), &TypeReducer::unionType); // ~(T & U) ~ (~T | ~U)
|
||||
options.push_back(negationType(arena->addType(NegationType{part})));
|
||||
return reduce(flatten<UnionType>(std::move(options))); // ~(T & U) ~ (~T | ~U)
|
||||
}
|
||||
else if (auto nu = get<UnionType>(ty))
|
||||
else if (auto nu = get<UnionType>(n->ty))
|
||||
{
|
||||
std::vector<TypeId> parts;
|
||||
for (TypeId option : nu)
|
||||
parts.push_back(negationType(option));
|
||||
return foldl<IntersectionType>(begin(parts), end(parts), &TypeReducer::intersectionType); // ~(T | U) ~ (~T & ~U)
|
||||
parts.push_back(negationType(arena->addType(NegationType{option})));
|
||||
return reduce(flatten<IntersectionType>(std::move(parts))); // ~(T | U) ~ (~T & ~U)
|
||||
}
|
||||
else
|
||||
return arena->addType(NegationType{ty}); // for all T except the ones handled above, ~T ~ ~T
|
||||
return ty; // for all T except the ones handled above, ~T ~ ~T
|
||||
}
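The branches above encode double negation and De Morgan's laws over the reducer's type algebra; collected in one place, in the comments' notation (illustrative summary, not new behaviour):

    ~~T ~ T
    ~(T & U) ~ ~T | ~U
    ~(T | U) ~ ~T & ~U
    ~never ~ unknown,  ~unknown ~ never,  ~any ~ any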
|
||||
|
||||
RecursionGuard TypeReducer::guard(TypeId ty)
|
||||
bool TypeReducer::isIrreducible(TypeId ty)
|
||||
{
|
||||
seen.push_back(ty);
|
||||
return RecursionGuard{&depth, FInt::LuauTypeReductionRecursionLimit, &seen};
|
||||
}
|
||||
|
||||
RecursionGuard TypeReducer::guard(TypePackId tp)
|
||||
{
|
||||
seen.push_back(tp);
|
||||
return RecursionGuard{&depth, FInt::LuauTypeReductionRecursionLimit, &seen};
|
||||
}
|
||||
|
||||
void TypeReducer::checkCacheable(TypeId ty)
|
||||
{
|
||||
if (!cacheOk)
|
||||
return;
|
||||
|
||||
ty = follow(ty);
|
||||
|
||||
// Only does shallow check, the TypeReducer itself already does deep traversal.
|
||||
if (get<FreeType>(ty) || get<BlockedType>(ty) || get<PendingExpansionType>(ty))
|
||||
cacheOk = false;
|
||||
if (auto ctx = memoizedTypes->find(ty); ctx && ctx->irreducible)
|
||||
return true;
|
||||
else if (get<FreeType>(ty) || get<BlockedType>(ty) || get<PendingExpansionType>(ty))
|
||||
return false;
|
||||
else if (auto tt = get<TableType>(ty); tt && (tt->state == TableState::Free || tt->state == TableState::Unsealed))
|
||||
cacheOk = false;
|
||||
return false;
|
||||
else
|
||||
return true;
|
||||
}
|
||||
|
||||
void TypeReducer::checkCacheable(TypePackId tp)
|
||||
bool TypeReducer::isIrreducible(TypePackId tp)
|
||||
{
|
||||
if (!cacheOk)
|
||||
return;
|
||||
|
||||
tp = follow(tp);
|
||||
|
||||
// Only does shallow check, the TypeReducer itself already does deep traversal.
|
||||
if (get<FreeTypePack>(tp) || get<BlockedTypePack>(tp))
|
||||
cacheOk = false;
|
||||
if (auto ctx = memoizedTypePacks->find(tp); ctx && ctx->irreducible)
|
||||
return true;
|
||||
else if (get<FreeTypePack>(tp) || get<BlockedTypePack>(tp))
|
||||
return false;
|
||||
else if (auto vtp = get<VariadicTypePack>(tp))
|
||||
return isIrreducible(vtp->ty);
|
||||
else
|
||||
return true;
|
||||
}
|
||||
|
||||
TypeId TypeReducer::memoize(TypeId ty, TypeId reducedTy)
|
||||
{
|
||||
ty = follow(ty);
|
||||
reducedTy = follow(reducedTy);
|
||||
|
||||
// The irreducibility of this [`reducedTy`] depends on whether its contents are themselves irreducible.
|
||||
// We don't need to recurse much further than that, because we already record the irreducibility from
|
||||
// the bottom up.
|
||||
bool irreducible = isIrreducible(reducedTy);
|
||||
if (auto it = get<IntersectionType>(reducedTy))
|
||||
{
|
||||
for (TypeId part : it)
|
||||
irreducible &= isIrreducible(part);
|
||||
}
|
||||
else if (auto ut = get<UnionType>(reducedTy))
|
||||
{
|
||||
for (TypeId option : ut)
|
||||
irreducible &= isIrreducible(option);
|
||||
}
|
||||
else if (auto tt = get<TableType>(reducedTy))
|
||||
{
|
||||
for (auto& [k, p] : tt->props)
|
||||
irreducible &= isIrreducible(p.type);
|
||||
|
||||
if (tt->indexer)
|
||||
{
|
||||
irreducible &= isIrreducible(tt->indexer->indexType);
|
||||
irreducible &= isIrreducible(tt->indexer->indexResultType);
|
||||
}
|
||||
|
||||
for (auto ta : tt->instantiatedTypeParams)
|
||||
irreducible &= isIrreducible(ta);
|
||||
|
||||
for (auto tpa : tt->instantiatedTypePackParams)
|
||||
irreducible &= isIrreducible(tpa);
|
||||
}
|
||||
else if (auto mt = get<MetatableType>(reducedTy))
|
||||
{
|
||||
irreducible &= isIrreducible(mt->table);
|
||||
irreducible &= isIrreducible(mt->metatable);
|
||||
}
|
||||
else if (auto ft = get<FunctionType>(reducedTy))
|
||||
{
|
||||
irreducible &= isIrreducible(ft->argTypes);
|
||||
irreducible &= isIrreducible(ft->retTypes);
|
||||
}
|
||||
else if (auto nt = get<NegationType>(reducedTy))
|
||||
irreducible &= isIrreducible(nt->ty);
|
||||
|
||||
(*memoizedTypes)[ty] = {reducedTy, irreducible};
|
||||
(*memoizedTypes)[reducedTy] = {reducedTy, irreducible};
|
||||
return reducedTy;
|
||||
}
|
||||
|
||||
TypePackId TypeReducer::memoize(TypePackId tp, TypePackId reducedTp)
|
||||
{
|
||||
tp = follow(tp);
|
||||
reducedTp = follow(reducedTp);
|
||||
|
||||
bool irreducible = isIrreducible(reducedTp);
|
||||
TypePackIterator it = begin(tp);
|
||||
while (it != end(tp))
|
||||
{
|
||||
irreducible &= isIrreducible(*it);
|
||||
++it;
|
||||
}
|
||||
|
||||
if (it.tail())
|
||||
irreducible &= isIrreducible(*it.tail());
|
||||
|
||||
(*memoizedTypePacks)[tp] = {reducedTp, irreducible};
|
||||
(*memoizedTypePacks)[reducedTp] = {reducedTp, irreducible};
|
||||
return reducedTp;
|
||||
}
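Both memoize overloads record the reduced form under the original key and under the reduced key itself, together with a flag saying whether the result is already irreducible, so a later reduce() of either type is a single lookup. A minimal self-contained sketch of that bookkeeping (toy string "types" and standard containers, not the real arena and DenseHashMap representation):

    #include <cstdio>
    #include <string>
    #include <unordered_map>

    struct ReductionContext { std::string type; bool irreducible = false; };

    int main()
    {
        std::unordered_map<std::string, ReductionContext> memoizedTypes;

        std::string original = "\"a\" | \"b\" | string";
        std::string reduced = "string";

        // memoize(original, reduced): both keys point at the reduced form.
        memoizedTypes[original] = {reduced, true};
        memoizedTypes[reduced] = {reduced, true};

        // A later query short-circuits instead of reducing again.
        if (auto it = memoizedTypes.find(original); it != memoizedTypes.end() && it->second.irreducible)
            std::printf("%s ~ %s\n", original.c_str(), it->second.type.c_str());
    }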
|
||||
|
||||
TypeId TypeReducer::memoizedOr(TypeId ty) const
|
||||
{
|
||||
ty = follow(ty);
|
||||
|
||||
if (auto ctx = memoizedTypes->find(ty))
|
||||
return ctx->type;
|
||||
else
|
||||
return ty;
|
||||
};
|
||||
|
||||
TypePackId TypeReducer::memoizedOr(TypePackId tp) const
|
||||
{
|
||||
tp = follow(tp);
|
||||
|
||||
if (auto ctx = memoizedTypePacks->find(tp))
|
||||
return ctx->type;
|
||||
else
|
||||
return tp;
|
||||
};
|
||||
|
||||
struct MarkCycles : TypeVisitor
|
||||
{
|
||||
DenseHashSet<TypeId> cyclicTypes{nullptr};
|
||||
|
||||
void cycle(TypeId ty) override
|
||||
{
|
||||
cyclicTypes.insert(ty);
|
||||
}
|
||||
|
||||
bool visit(TypeId ty) override
|
||||
{
|
||||
return !cyclicTypes.find(ty);
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace
|
||||
|
||||
TypeReduction::TypeReduction(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<InternalErrorReporter> handle)
|
||||
TypeReduction::TypeReduction(
|
||||
NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<InternalErrorReporter> handle, const TypeReductionOptions& opts)
|
||||
: arena(arena)
|
||||
, builtinTypes(builtinTypes)
|
||||
, handle(handle)
|
||||
, options(opts)
|
||||
{
|
||||
}
|
||||
|
||||
std::optional<TypeId> TypeReduction::reduce(TypeId ty)
|
||||
{
|
||||
if (auto found = cachedTypes.find(ty))
|
||||
return *found;
|
||||
ty = follow(ty);
|
||||
|
||||
auto [reducedTy, cacheOk] = reduceImpl(ty);
|
||||
if (cacheOk)
|
||||
cachedTypes[ty] = *reducedTy;
|
||||
if (FFlag::DebugLuauDontReduceTypes)
|
||||
return ty;
|
||||
else if (!options.allowTypeReductionsFromOtherArenas && ty->owningArena != arena)
|
||||
return ty;
|
||||
else if (auto ctx = memoizedTypes.find(ty); ctx && ctx->irreducible)
|
||||
return ctx->type;
|
||||
else if (hasExceededCartesianProductLimit(ty))
|
||||
return std::nullopt;
|
||||
|
||||
return reducedTy;
|
||||
try
|
||||
{
|
||||
MarkCycles finder;
|
||||
finder.traverse(ty);
|
||||
|
||||
TypeReducer reducer{arena, builtinTypes, handle, &memoizedTypes, &memoizedTypePacks, &finder.cyclicTypes};
|
||||
return reducer.reduce(ty);
|
||||
}
|
||||
catch (const RecursionLimitException&)
|
||||
{
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
std::optional<TypePackId> TypeReduction::reduce(TypePackId tp)
|
||||
{
|
||||
if (auto found = cachedTypePacks.find(tp))
|
||||
return *found;
|
||||
tp = follow(tp);
|
||||
|
||||
auto [reducedTp, cacheOk] = reduceImpl(tp);
|
||||
if (cacheOk)
|
||||
cachedTypePacks[tp] = *reducedTp;
|
||||
|
||||
return reducedTp;
|
||||
}
|
||||
|
||||
std::pair<std::optional<TypeId>, bool> TypeReduction::reduceImpl(TypeId ty)
|
||||
{
|
||||
if (FFlag::DebugLuauDontReduceTypes)
|
||||
return {ty, false};
|
||||
|
||||
if (hasExceededCartesianProductLimit(ty))
|
||||
return {std::nullopt, false};
|
||||
return tp;
|
||||
else if (!options.allowTypeReductionsFromOtherArenas && tp->owningArena != arena)
|
||||
return tp;
|
||||
else if (auto ctx = memoizedTypePacks.find(tp); ctx && ctx->irreducible)
|
||||
return ctx->type;
|
||||
else if (hasExceededCartesianProductLimit(tp))
|
||||
return std::nullopt;
|
||||
|
||||
try
|
||||
{
|
||||
TypeReducer reducer{arena, builtinTypes, handle};
|
||||
return {reducer.reduce(ty), reducer.cacheOk};
|
||||
MarkCycles finder;
|
||||
finder.traverse(tp);
|
||||
|
||||
TypeReducer reducer{arena, builtinTypes, handle, &memoizedTypes, &memoizedTypePacks, &finder.cyclicTypes};
|
||||
return reducer.reduce(tp);
|
||||
}
|
||||
catch (const RecursionLimitException&)
|
||||
{
|
||||
return {std::nullopt, false};
|
||||
}
|
||||
}
|
||||
|
||||
std::pair<std::optional<TypePackId>, bool> TypeReduction::reduceImpl(TypePackId tp)
|
||||
{
|
||||
if (FFlag::DebugLuauDontReduceTypes)
|
||||
return {tp, false};
|
||||
|
||||
if (hasExceededCartesianProductLimit(tp))
|
||||
return {std::nullopt, false};
|
||||
|
||||
try
|
||||
{
|
||||
TypeReducer reducer{arena, builtinTypes, handle};
|
||||
return {reducer.reduce(tp), reducer.cacheOk};
|
||||
}
|
||||
catch (const RecursionLimitException&)
|
||||
{
|
||||
return {std::nullopt, false};
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,8 +1,6 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/Unifiable.h"
LUAU_FASTFLAG(LuauTypeNormalization2);
namespace Luau
{
namespace Unifiable
|
@ -11,19 +9,19 @@ namespace Unifiable
|
|||
static int nextIndex = 0;
|
||||
|
||||
Free::Free(TypeLevel level)
|
||||
: index(FFlag::LuauTypeNormalization2 ? ++nextIndex : ++DEPRECATED_nextIndex)
|
||||
: index(++nextIndex)
|
||||
, level(level)
|
||||
{
|
||||
}
|
||||
|
||||
Free::Free(Scope* scope)
|
||||
: index(FFlag::LuauTypeNormalization2 ? ++nextIndex : ++DEPRECATED_nextIndex)
|
||||
: index(++nextIndex)
|
||||
, scope(scope)
|
||||
{
|
||||
}
|
||||
|
||||
Free::Free(Scope* scope, TypeLevel level)
|
||||
: index(FFlag::LuauTypeNormalization2 ? ++nextIndex : ++DEPRECATED_nextIndex)
|
||||
: index(++nextIndex)
|
||||
, level(level)
|
||||
, scope(scope)
|
||||
{
|
||||
|
@ -32,33 +30,33 @@ Free::Free(Scope* scope, TypeLevel level)
|
|||
int Free::DEPRECATED_nextIndex = 0;
|
||||
|
||||
Generic::Generic()
|
||||
: index(FFlag::LuauTypeNormalization2 ? ++nextIndex : ++DEPRECATED_nextIndex)
|
||||
: index(++nextIndex)
|
||||
, name("g" + std::to_string(index))
|
||||
{
|
||||
}
|
||||
|
||||
Generic::Generic(TypeLevel level)
|
||||
: index(FFlag::LuauTypeNormalization2 ? ++nextIndex : ++DEPRECATED_nextIndex)
|
||||
: index(++nextIndex)
|
||||
, level(level)
|
||||
, name("g" + std::to_string(index))
|
||||
{
|
||||
}
|
||||
|
||||
Generic::Generic(const Name& name)
|
||||
: index(FFlag::LuauTypeNormalization2 ? ++nextIndex : ++DEPRECATED_nextIndex)
|
||||
: index(++nextIndex)
|
||||
, name(name)
|
||||
, explicitName(true)
|
||||
{
|
||||
}
|
||||
|
||||
Generic::Generic(Scope* scope)
|
||||
: index(FFlag::LuauTypeNormalization2 ? ++nextIndex : ++DEPRECATED_nextIndex)
|
||||
: index(++nextIndex)
|
||||
, scope(scope)
|
||||
{
|
||||
}
|
||||
|
||||
Generic::Generic(TypeLevel level, const Name& name)
|
||||
: index(FFlag::LuauTypeNormalization2 ? ++nextIndex : ++DEPRECATED_nextIndex)
|
||||
: index(++nextIndex)
|
||||
, level(level)
|
||||
, name(name)
|
||||
, explicitName(true)
|
||||
|
@ -66,7 +64,7 @@ Generic::Generic(TypeLevel level, const Name& name)
|
|||
}
|
||||
|
||||
Generic::Generic(Scope* scope, const Name& name)
|
||||
: index(FFlag::LuauTypeNormalization2 ? ++nextIndex : ++DEPRECATED_nextIndex)
|
||||
: index(++nextIndex)
|
||||
, scope(scope)
|
||||
, name(name)
|
||||
, explicitName(true)
|
||||
|
|
|
@ -18,15 +18,13 @@
|
|||
LUAU_FASTINT(LuauTypeInferTypePackLoopLimit);
|
||||
LUAU_FASTFLAG(LuauErrorRecoveryType);
|
||||
LUAU_FASTFLAG(LuauUnknownAndNeverType)
|
||||
LUAU_FASTFLAGVARIABLE(LuauReportTypeMismatchForTypePackUnificationFailure, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauSubtypeNormalizer, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauScalarShapeSubtyping, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauUnifyAnyTxnLog, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauInstantiateInSubtyping, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauOverloadedFunctionSubtypingPerf, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauScalarShapeUnifyToMtOwner2, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauUninhabitedSubAnything2, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauMaintainScopesInUnifier, false)
|
||||
LUAU_FASTFLAG(LuauClassTypeVarsInSubstitution)
|
||||
LUAU_FASTFLAG(LuauTxnLogTypePackIterator)
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
|
||||
LUAU_FASTFLAG(LuauNegatedFunctionTypes)
|
||||
LUAU_FASTFLAG(LuauNegatedClassTypes)
|
||||
|
@ -378,7 +376,7 @@ Unifier::Unifier(NotNull<Normalizer> normalizer, Mode mode, NotNull<Scope> scope
|
|||
, variance(variance)
|
||||
, sharedState(*normalizer->sharedState)
|
||||
{
|
||||
normalize = FFlag::LuauSubtypeNormalizer;
|
||||
normalize = true;
|
||||
LUAU_ASSERT(sharedState.iceHandler);
|
||||
}
|
||||
|
||||
|
@ -480,6 +478,28 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
|
|||
return;
|
||||
}
|
||||
|
||||
if (FFlag::LuauUnifyAnyTxnLog)
|
||||
{
|
||||
if (log.get<AnyType>(superTy))
|
||||
return tryUnifyWithAny(subTy, builtinTypes->anyType);
|
||||
|
||||
if (log.get<ErrorType>(superTy))
|
||||
return tryUnifyWithAny(subTy, builtinTypes->errorType);
|
||||
|
||||
if (log.get<UnknownType>(superTy))
|
||||
return tryUnifyWithAny(subTy, builtinTypes->unknownType);
|
||||
|
||||
if (log.get<AnyType>(subTy))
|
||||
return tryUnifyWithAny(superTy, builtinTypes->anyType);
|
||||
|
||||
if (log.get<ErrorType>(subTy))
|
||||
return tryUnifyWithAny(superTy, builtinTypes->errorType);
|
||||
|
||||
if (log.get<NeverType>(subTy))
|
||||
return tryUnifyWithAny(superTy, builtinTypes->neverType);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (get<ErrorType>(superTy) || get<AnyType>(superTy) || get<UnknownType>(superTy))
|
||||
return tryUnifyWithAny(subTy, superTy);
|
||||
|
||||
|
@ -491,6 +511,7 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
|
|||
|
||||
if (log.get<NeverType>(subTy))
|
||||
return tryUnifyWithAny(superTy, subTy);
|
||||
}
|
||||
|
||||
auto& cache = sharedState.cachedUnify;
|
||||
|
||||
|
@ -524,10 +545,6 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
|
|||
{
|
||||
tryUnifyUnionWithType(subTy, subUnion, superTy);
|
||||
}
|
||||
else if (const UnionType* uv = (FFlag::LuauSubtypeNormalizer ? nullptr : log.getMutable<UnionType>(superTy)))
|
||||
{
|
||||
tryUnifyTypeWithUnion(subTy, superTy, uv, cacheEnabled, isFunctionCall);
|
||||
}
|
||||
else if (const IntersectionType* uv = log.getMutable<IntersectionType>(superTy))
|
||||
{
|
||||
tryUnifyTypeWithIntersection(subTy, superTy, uv);
|
||||
|
@ -915,8 +932,6 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*
|
|||
void Unifier::tryUnifyNormalizedTypes(
|
||||
TypeId subTy, TypeId superTy, const NormalizedType& subNorm, const NormalizedType& superNorm, std::string reason, std::optional<TypeError> error)
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauSubtypeNormalizer);
|
||||
|
||||
if (get<UnknownType>(superNorm.tops) || get<AnyType>(superNorm.tops) || get<AnyType>(subNorm.tops))
|
||||
return;
|
||||
else if (get<UnknownType>(subNorm.tops))
|
||||
|
@ -1095,13 +1110,10 @@ TypePackId Unifier::tryApplyOverloadedFunction(TypeId function, const Normalized
|
|||
{
|
||||
log.concat(std::move(innerState.log));
|
||||
if (result)
|
||||
{
|
||||
if (FFlag::LuauOverloadedFunctionSubtypingPerf)
|
||||
{
|
||||
innerState.log.clear();
|
||||
innerState.tryUnify_(*result, ftv->retTypes);
|
||||
}
|
||||
if (FFlag::LuauOverloadedFunctionSubtypingPerf && innerState.errors.empty())
|
||||
if (innerState.errors.empty())
|
||||
log.concat(std::move(innerState.log));
|
||||
// Annoyingly, since we don't support intersection of generic type packs,
|
||||
// the intersection may fail. We rather arbitrarily use the first matching overload
|
||||
|
@ -1250,8 +1262,11 @@ struct WeirdIter
|
|||
LUAU_ASSERT(canGrow());
|
||||
LUAU_ASSERT(log.getMutable<TypePack>(newTail));
|
||||
|
||||
level = log.getMutable<Unifiable::Free>(packId)->level;
|
||||
scope = log.getMutable<Unifiable::Free>(packId)->scope;
|
||||
auto freePack = log.getMutable<Unifiable::Free>(packId);
|
||||
|
||||
level = freePack->level;
|
||||
if (FFlag::LuauMaintainScopesInUnifier && freePack->scope != nullptr)
|
||||
scope = freePack->scope;
|
||||
log.replace(packId, BoundTypePack(newTail));
|
||||
packId = newTail;
|
||||
pack = log.getMutable<TypePack>(newTail);
|
||||
|
@ -1380,6 +1395,12 @@ void Unifier::tryUnify_(TypePackId subTp, TypePackId superTp, bool isFunctionCal
|
|||
auto superIter = WeirdIter(superTp, log);
|
||||
auto subIter = WeirdIter(subTp, log);
|
||||
|
||||
if (FFlag::LuauMaintainScopesInUnifier)
|
||||
{
|
||||
superIter.scope = scope.get();
|
||||
subIter.scope = scope.get();
|
||||
}
|
||||
|
||||
auto mkFreshType = [this](Scope* scope, TypeLevel level) {
|
||||
return types->freshType(scope, level);
|
||||
};
|
||||
|
@ -1420,15 +1441,9 @@ void Unifier::tryUnify_(TypePackId subTp, TypePackId superTp, bool isFunctionCal
|
|||
// If both are at the end, we're done
|
||||
if (!superIter.good() && !subIter.good())
|
||||
{
|
||||
if (!FFlag::LuauTxnLogTypePackIterator && subTpv->tail && superTpv->tail)
|
||||
{
|
||||
tryUnify_(*subTpv->tail, *superTpv->tail);
|
||||
break;
|
||||
}
|
||||
|
||||
const bool lFreeTail = superTpv->tail && log.getMutable<FreeTypePack>(log.follow(*superTpv->tail)) != nullptr;
|
||||
const bool rFreeTail = subTpv->tail && log.getMutable<FreeTypePack>(log.follow(*subTpv->tail)) != nullptr;
|
||||
if (FFlag::LuauTxnLogTypePackIterator && lFreeTail && rFreeTail)
|
||||
if (lFreeTail && rFreeTail)
|
||||
{
|
||||
tryUnify_(*subTpv->tail, *superTpv->tail);
|
||||
}
|
||||
|
@ -1440,7 +1455,7 @@ void Unifier::tryUnify_(TypePackId subTp, TypePackId superTp, bool isFunctionCal
|
|||
{
|
||||
tryUnify_(emptyTp, *subTpv->tail);
|
||||
}
|
||||
else if (FFlag::LuauTxnLogTypePackIterator && subTpv->tail && superTpv->tail)
|
||||
else if (subTpv->tail && superTpv->tail)
|
||||
{
|
||||
if (log.getMutable<VariadicTypePack>(superIter.packId))
|
||||
tryUnifyVariadics(subIter.packId, superIter.packId, false, int(subIter.index));
|
||||
|
@ -1523,10 +1538,7 @@ void Unifier::tryUnify_(TypePackId subTp, TypePackId superTp, bool isFunctionCal
|
|||
}
|
||||
else
|
||||
{
|
||||
if (FFlag::LuauReportTypeMismatchForTypePackUnificationFailure)
|
||||
reportError(location, TypePackMismatch{subTp, superTp});
|
||||
else
|
||||
reportError(location, GenericError{"Failed to unify type packs"});
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2356,11 +2368,11 @@ void Unifier::tryUnifyVariadics(TypePackId subTp, TypePackId superTp, bool rever
|
|||
if (!superVariadic)
|
||||
ice("passed non-variadic pack to tryUnifyVariadics");
|
||||
|
||||
if (const VariadicTypePack* subVariadic = FFlag::LuauTxnLogTypePackIterator ? log.get<VariadicTypePack>(subTp) : get<VariadicTypePack>(subTp))
|
||||
if (const VariadicTypePack* subVariadic = log.get<VariadicTypePack>(subTp))
|
||||
{
|
||||
tryUnify_(reversed ? superVariadic->ty : subVariadic->ty, reversed ? subVariadic->ty : superVariadic->ty);
|
||||
}
|
||||
else if (FFlag::LuauTxnLogTypePackIterator ? log.get<TypePack>(subTp) : get<TypePack>(subTp))
|
||||
else if (log.get<TypePack>(subTp))
|
||||
{
|
||||
TypePackIterator subIter = begin(subTp, &log);
|
||||
TypePackIterator subEnd = end(subTp);
|
||||
|
@ -2465,9 +2477,18 @@ void Unifier::tryUnifyWithAny(TypeId subTy, TypeId anyTy)
|
|||
{
|
||||
LUAU_ASSERT(get<AnyType>(anyTy) || get<ErrorType>(anyTy) || get<UnknownType>(anyTy) || get<NeverType>(anyTy));
|
||||
|
||||
if (FFlag::LuauUnifyAnyTxnLog)
|
||||
{
|
||||
// These types are not visited in general loop below
|
||||
if (log.get<PrimitiveType>(subTy) || log.get<AnyType>(subTy) || log.get<ClassType>(subTy))
|
||||
return;
|
||||
}
|
||||
else
|
||||
{
|
||||
// These types are not visited in general loop below
|
||||
if (get<PrimitiveType>(subTy) || get<AnyType>(subTy) || get<ClassType>(subTy))
|
||||
return;
|
||||
}
|
||||
|
||||
TypePackId anyTp;
|
||||
if (FFlag::LuauUnknownAndNeverType)
|
||||
|
|
|
@ -983,12 +983,13 @@ class AstStatTypeAlias : public AstStat
|
|||
public:
|
||||
LUAU_RTTI(AstStatTypeAlias)
|
||||
|
||||
AstStatTypeAlias(const Location& location, const AstName& name, const AstArray<AstGenericType>& generics,
|
||||
AstStatTypeAlias(const Location& location, const AstName& name, const Location& nameLocation, const AstArray<AstGenericType>& generics,
|
||||
const AstArray<AstGenericTypePack>& genericPacks, AstType* type, bool exported);
|
||||
|
||||
void visit(AstVisitor* visitor) override;
|
||||
|
||||
AstName name;
|
||||
Location nameLocation;
|
||||
AstArray<AstGenericType> generics;
|
||||
AstArray<AstGenericTypePack> genericPacks;
|
||||
AstType* type;
|
||||
|
|
|
@ -647,10 +647,11 @@ void AstStatLocalFunction::visit(AstVisitor* visitor)
|
|||
func->visit(visitor);
|
||||
}
|
||||
|
||||
AstStatTypeAlias::AstStatTypeAlias(const Location& location, const AstName& name, const AstArray<AstGenericType>& generics,
|
||||
const AstArray<AstGenericTypePack>& genericPacks, AstType* type, bool exported)
|
||||
AstStatTypeAlias::AstStatTypeAlias(const Location& location, const AstName& name, const Location& nameLocation,
|
||||
const AstArray<AstGenericType>& generics, const AstArray<AstGenericTypePack>& genericPacks, AstType* type, bool exported)
|
||||
: AstStat(ClassIndex(), location)
|
||||
, name(name)
|
||||
, nameLocation(nameLocation)
|
||||
, generics(generics)
|
||||
, genericPacks(genericPacks)
|
||||
, type(type)
|
||||
|
|
|
@ -768,7 +768,7 @@ AstStat* Parser::parseTypeAlias(const Location& start, bool exported)
|
|||
|
||||
AstType* type = parseTypeAnnotation();
|
||||
|
||||
return allocator.alloc<AstStatTypeAlias>(Location(start, type->location), name->name, generics, genericPacks, type, exported);
|
||||
return allocator.alloc<AstStatTypeAlias>(Location(start, type->location), name->name, name->location, generics, genericPacks, type, exported);
|
||||
}
|
||||
|
||||
AstDeclaredClassProp Parser::parseDeclaredClassMethod()
|
||||
|
|
|
@ -244,7 +244,7 @@ static int emitInst(AssemblyBuilderX64& build, NativeState& data, ModuleHelpers&
|
|||
emitInstForNPrep(build, pc, i, labelarr[i + 1 + LUAU_INSN_D(*pc)]);
|
||||
break;
|
||||
case LOP_FORNLOOP:
|
||||
emitInstForNLoop(build, pc, i, labelarr[i + 1 + LUAU_INSN_D(*pc)]);
|
||||
emitInstForNLoop(build, pc, i, labelarr[i + 1 + LUAU_INSN_D(*pc)], next);
|
||||
break;
|
||||
case LOP_FORGLOOP:
|
||||
emitinstForGLoop(build, pc, i, labelarr[i + 1 + LUAU_INSN_D(*pc)], next, fallback);
|
||||
|
|
|
@ -251,7 +251,7 @@ void callGetFastTmOrFallback(AssemblyBuilderX64& build, RegisterX64 table, TMS t
|
|||
// rArg1 is already prepared
|
||||
build.mov(rArg2, tm);
|
||||
build.mov(rax, qword[rState + offsetof(lua_State, global)]);
|
||||
build.mov(rArg3, qword[rax + offsetof(global_State, tmname[tm])]);
|
||||
build.mov(rArg3, qword[rax + offsetof(global_State, tmname) + tm * sizeof(TString*)]);
|
||||
build.call(qword[rNativeContext + offsetof(NativeContext, luaT_gettm)]);
|
||||
}
|
||||
|
||||
|
|
|
@ -1010,17 +1010,15 @@ static int emitInstFastCallN(
|
|||
|
||||
if (nparams == LUA_MULTRET)
|
||||
{
|
||||
// TODO: for SystemV ABI we can compute the result directly into rArg6
|
||||
// L->top - (ra + 1)
|
||||
build.mov(rcx, qword[rState + offsetof(lua_State, top)]);
|
||||
RegisterX64 reg = (build.abi == ABIX64::Windows) ? rcx : rArg6;
|
||||
build.mov(reg, qword[rState + offsetof(lua_State, top)]);
|
||||
build.lea(rdx, addr[rBase + (ra + 1) * sizeof(TValue)]);
|
||||
build.sub(rcx, rdx);
|
||||
build.shr(rcx, kTValueSizeLog2);
|
||||
build.sub(reg, rdx);
|
||||
build.shr(reg, kTValueSizeLog2);
|
||||
|
||||
if (build.abi == ABIX64::Windows)
|
||||
build.mov(sArg6, rcx);
|
||||
else
|
||||
build.mov(rArg6, rcx);
|
||||
build.mov(sArg6, reg);
|
||||
}
|
||||
else
|
||||
{
|
||||
|
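A side note on the LUA_MULTRET path above: the variadic count is obtained by subtracting the address of the first result slot (ra + 1) from L->top and shifting the byte difference right by kTValueSizeLog2. Below is a minimal standalone sketch of that arithmetic with a toy 16-byte value type standing in for the engine's TValue; every name in it is illustrative and not taken from the codebase.

#include <cassert>
#include <cstddef>

// Toy stand-in for the VM's TValue; only its size matters here (assumed to be 16 bytes).
struct ToyTValue
{
    double value;
    long long extra;
};

constexpr int kToyTValueSizeLog2 = 4; // log2(sizeof(ToyTValue))

int main()
{
    ToyTValue stack[8];
    ToyTValue* firstResult = stack + 2; // plays the role of ra + 1
    ToyTValue* top = stack + 7;         // plays the role of L->top

    // What the emitted sub/shr pair computes: the byte difference, shifted down by the element size log2.
    std::ptrdiff_t bytes = reinterpret_cast<char*>(top) - reinterpret_cast<char*>(firstResult);
    std::ptrdiff_t count = bytes >> kToyTValueSizeLog2;

    // Same result as typed pointer arithmetic.
    assert(count == top - firstResult); // 5 values
    return 0;
}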
@ -1126,7 +1124,7 @@ void emitInstForNPrep(AssemblyBuilderX64& build, const Instruction* pc, int pcpo
|
|||
build.setLabel(exit);
|
||||
}
|
||||
|
||||
void emitInstForNLoop(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, Label& loopRepeat)
|
||||
void emitInstForNLoop(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, Label& loopRepeat, Label& loopExit)
|
||||
{
|
||||
emitInterrupt(build, pcpos);
|
||||
|
||||
|
@ -1144,20 +1142,18 @@ void emitInstForNLoop(AssemblyBuilderX64& build, const Instruction* pc, int pcpo
|
|||
build.vaddsd(idx, idx, step);
|
||||
build.vmovsd(luauRegValue(ra + 2), idx);
|
||||
|
||||
Label reverse, exit;
|
||||
Label reverse;
|
||||
|
||||
// step <= 0
|
||||
jumpOnNumberCmp(build, noreg, step, zero, ConditionX64::LessEqual, reverse);
|
||||
|
||||
// false: idx <= limit
|
||||
jumpOnNumberCmp(build, noreg, idx, limit, ConditionX64::LessEqual, loopRepeat);
|
||||
build.jmp(exit);
|
||||
build.jmp(loopExit);
|
||||
|
||||
// true: limit <= idx
|
||||
build.setLabel(reverse);
|
||||
jumpOnNumberCmp(build, noreg, limit, idx, ConditionX64::LessEqual, loopRepeat);
|
||||
|
||||
build.setLabel(exit);
|
||||
}
|
||||
|
||||
void emitinstForGLoop(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, Label& loopRepeat, Label& loopExit, Label& fallback)
|
||||
|
|
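For context on the emitInstForNLoop change above: the loop test branches on the sign of the step, compares the updated index against the limit in the matching direction, and now jumps to the caller-provided loopExit label instead of a function-local exit label. A minimal scalar sketch of the continuation test those comparisons implement follows; it is an illustration in plain C++ (NaN handling ignored), not the emitted code.

#include <cstdio>

// Mirrors the jumpOnNumberCmp sequence: with a positive step keep looping while idx <= limit,
// otherwise keep looping while limit <= idx.
static bool fornloopContinues(double idx, double limit, double step)
{
    return step > 0.0 ? idx <= limit : limit <= idx;
}

int main()
{
    // Rough equivalent of `for i = 1, 3 do ... end`.
    for (double i = 1.0; fornloopContinues(i, 3.0, 1.0); i += 1.0)
        std::printf("%g\n", i); // prints 1, 2, 3
    return 0;
}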
|
@ -61,7 +61,7 @@ int emitInstFastCall2(AssemblyBuilderX64& build, const Instruction* pc, int pcpo
|
|||
int emitInstFastCall2K(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, Label& fallback);
|
||||
int emitInstFastCall(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, Label& fallback);
|
||||
void emitInstForNPrep(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, Label& loopExit);
|
||||
void emitInstForNLoop(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, Label& loopRepeat);
|
||||
void emitInstForNLoop(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, Label& loopRepeat, Label& loopExit);
|
||||
void emitinstForGLoop(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, Label& loopRepeat, Label& loopExit, Label& fallback);
|
||||
void emitinstForGLoopFallback(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, Label& loopRepeat);
|
||||
void emitInstForGPrepNext(AssemblyBuilderX64& build, const Instruction* pc, Label& target, Label& fallback);
|
||||
|
|
CodeGen/src/IrBuilder.cpp (new file, 563 lines)
|
@ -0,0 +1,563 @@
|
|||
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||
#include "IrBuilder.h"
|
||||
|
||||
#include "Luau/Common.h"
|
||||
|
||||
#include "CustomExecUtils.h"
|
||||
#include "IrTranslation.h"
|
||||
#include "IrUtils.h"
|
||||
|
||||
#include "lapi.h"
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
namespace CodeGen
|
||||
{
|
||||
|
||||
constexpr unsigned kNoAssociatedBlockIndex = ~0u;
|
||||
|
||||
void IrBuilder::buildFunctionIr(Proto* proto)
|
||||
{
|
||||
function.proto = proto;
|
||||
|
||||
// Rebuild original control flow blocks
|
||||
rebuildBytecodeBasicBlocks(proto);
|
||||
|
||||
function.bcMapping.resize(proto->sizecode, {~0u, 0});
|
||||
|
||||
// Translate all instructions to IR inside blocks
|
||||
for (int i = 0; i < proto->sizecode;)
|
||||
{
|
||||
const Instruction* pc = &proto->code[i];
|
||||
LuauOpcode op = LuauOpcode(LUAU_INSN_OP(*pc));
|
||||
|
||||
int nexti = i + getOpLength(op);
|
||||
LUAU_ASSERT(nexti <= proto->sizecode);
|
||||
|
||||
function.bcMapping[i] = {uint32_t(function.instructions.size()), 0};
|
||||
|
||||
// Begin new block at this instruction if it was in the bytecode or requested during translation
|
||||
if (instIndexToBlock[i] != kNoAssociatedBlockIndex)
|
||||
beginBlock(blockAtInst(i));
|
||||
|
||||
translateInst(op, pc, i);
|
||||
|
||||
i = nexti;
|
||||
LUAU_ASSERT(i <= proto->sizecode);
|
||||
|
||||
// If we are going into a new block at the next instruction and it's a fallthrough, jump has to be placed to mark block termination
|
||||
if (i < int(instIndexToBlock.size()) && instIndexToBlock[i] != kNoAssociatedBlockIndex)
|
||||
{
|
||||
if (!isBlockTerminator(function.instructions.back().cmd))
|
||||
inst(IrCmd::JUMP, blockAtInst(i));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void IrBuilder::rebuildBytecodeBasicBlocks(Proto* proto)
|
||||
{
|
||||
instIndexToBlock.resize(proto->sizecode, kNoAssociatedBlockIndex);
|
||||
|
||||
// Mark jump targets
|
||||
std::vector<uint8_t> jumpTargets(proto->sizecode, 0);
|
||||
|
||||
for (int i = 0; i < proto->sizecode;)
|
||||
{
|
||||
const Instruction* pc = &proto->code[i];
|
||||
LuauOpcode op = LuauOpcode(LUAU_INSN_OP(*pc));
|
||||
|
||||
int target = getJumpTarget(*pc, uint32_t(i));
|
||||
|
||||
if (target >= 0 && !isFastCall(op))
|
||||
jumpTargets[target] = true;
|
||||
|
||||
i += getOpLength(op);
|
||||
LUAU_ASSERT(i <= proto->sizecode);
|
||||
}
|
||||
|
||||
|
||||
// Bytecode blocks are created at bytecode jump targets and the start of a function
|
||||
jumpTargets[0] = true;
|
||||
|
||||
for (int i = 0; i < proto->sizecode; i++)
|
||||
{
|
||||
if (jumpTargets[i])
|
||||
{
|
||||
IrOp b = block(IrBlockKind::Bytecode);
|
||||
instIndexToBlock[i] = b.index;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void IrBuilder::translateInst(LuauOpcode op, const Instruction* pc, int i)
|
||||
{
|
||||
switch (op)
|
||||
{
|
||||
case LOP_NOP:
|
||||
break;
|
||||
case LOP_LOADNIL:
|
||||
translateInstLoadNil(*this, pc);
|
||||
break;
|
||||
case LOP_LOADB:
|
||||
translateInstLoadB(*this, pc, i);
|
||||
break;
|
||||
case LOP_LOADN:
|
||||
translateInstLoadN(*this, pc);
|
||||
break;
|
||||
case LOP_LOADK:
|
||||
translateInstLoadK(*this, pc);
|
||||
break;
|
||||
case LOP_LOADKX:
|
||||
translateInstLoadKX(*this, pc);
|
||||
break;
|
||||
case LOP_MOVE:
|
||||
translateInstMove(*this, pc);
|
||||
break;
|
||||
case LOP_GETGLOBAL:
|
||||
translateInstGetGlobal(*this, pc, i);
|
||||
break;
|
||||
case LOP_SETGLOBAL:
|
||||
translateInstSetGlobal(*this, pc, i);
|
||||
break;
|
||||
case LOP_CALL:
|
||||
inst(IrCmd::LOP_CALL, constUint(i));
|
||||
|
||||
if (activeFastcallFallback)
|
||||
{
|
||||
inst(IrCmd::JUMP, fastcallFallbackReturn);
|
||||
|
||||
beginBlock(fastcallFallbackReturn);
|
||||
|
||||
activeFastcallFallback = false;
|
||||
}
|
||||
break;
|
||||
case LOP_RETURN:
|
||||
inst(IrCmd::LOP_RETURN, constUint(i));
|
||||
break;
|
||||
case LOP_GETTABLE:
|
||||
translateInstGetTable(*this, pc, i);
|
||||
break;
|
||||
case LOP_SETTABLE:
|
||||
translateInstSetTable(*this, pc, i);
|
||||
break;
|
||||
case LOP_GETTABLEKS:
|
||||
translateInstGetTableKS(*this, pc, i);
|
||||
break;
|
||||
case LOP_SETTABLEKS:
|
||||
translateInstSetTableKS(*this, pc, i);
|
||||
break;
|
||||
case LOP_GETTABLEN:
|
||||
translateInstGetTableN(*this, pc, i);
|
||||
break;
|
||||
case LOP_SETTABLEN:
|
||||
translateInstSetTableN(*this, pc, i);
|
||||
break;
|
||||
case LOP_JUMP:
|
||||
translateInstJump(*this, pc, i);
|
||||
break;
|
||||
case LOP_JUMPBACK:
|
||||
translateInstJumpBack(*this, pc, i);
|
||||
break;
|
||||
case LOP_JUMPIF:
|
||||
translateInstJumpIf(*this, pc, i, /* not_ */ false);
|
||||
break;
|
||||
case LOP_JUMPIFNOT:
|
||||
translateInstJumpIf(*this, pc, i, /* not_ */ true);
|
||||
break;
|
||||
case LOP_JUMPIFEQ:
|
||||
translateInstJumpIfEq(*this, pc, i, /* not_ */ false);
|
||||
break;
|
||||
case LOP_JUMPIFLE:
|
||||
translateInstJumpIfCond(*this, pc, i, IrCondition::LessEqual);
|
||||
break;
|
||||
case LOP_JUMPIFLT:
|
||||
translateInstJumpIfCond(*this, pc, i, IrCondition::Less);
|
||||
break;
|
||||
case LOP_JUMPIFNOTEQ:
|
||||
translateInstJumpIfEq(*this, pc, i, /* not_ */ true);
|
||||
break;
|
||||
case LOP_JUMPIFNOTLE:
|
||||
translateInstJumpIfCond(*this, pc, i, IrCondition::NotLessEqual);
|
||||
break;
|
||||
case LOP_JUMPIFNOTLT:
|
||||
translateInstJumpIfCond(*this, pc, i, IrCondition::NotLess);
|
||||
break;
|
||||
case LOP_JUMPX:
|
||||
translateInstJumpX(*this, pc, i);
|
||||
break;
|
||||
case LOP_JUMPXEQKNIL:
|
||||
translateInstJumpxEqNil(*this, pc, i);
|
||||
break;
|
||||
case LOP_JUMPXEQKB:
|
||||
translateInstJumpxEqB(*this, pc, i);
|
||||
break;
|
||||
case LOP_JUMPXEQKN:
|
||||
translateInstJumpxEqN(*this, pc, i);
|
||||
break;
|
||||
case LOP_JUMPXEQKS:
|
||||
translateInstJumpxEqS(*this, pc, i);
|
||||
break;
|
||||
case LOP_ADD:
|
||||
translateInstBinary(*this, pc, i, TM_ADD);
|
||||
break;
|
||||
case LOP_SUB:
|
||||
translateInstBinary(*this, pc, i, TM_SUB);
|
||||
break;
|
||||
case LOP_MUL:
|
||||
translateInstBinary(*this, pc, i, TM_MUL);
|
||||
break;
|
||||
case LOP_DIV:
|
||||
translateInstBinary(*this, pc, i, TM_DIV);
|
||||
break;
|
||||
case LOP_MOD:
|
||||
translateInstBinary(*this, pc, i, TM_MOD);
|
||||
break;
|
||||
case LOP_POW:
|
||||
translateInstBinary(*this, pc, i, TM_POW);
|
||||
break;
|
||||
case LOP_ADDK:
|
||||
translateInstBinaryK(*this, pc, i, TM_ADD);
|
||||
break;
|
||||
case LOP_SUBK:
|
||||
translateInstBinaryK(*this, pc, i, TM_SUB);
|
||||
break;
|
||||
case LOP_MULK:
|
||||
translateInstBinaryK(*this, pc, i, TM_MUL);
|
||||
break;
|
||||
case LOP_DIVK:
|
||||
translateInstBinaryK(*this, pc, i, TM_DIV);
|
||||
break;
|
||||
case LOP_MODK:
|
||||
translateInstBinaryK(*this, pc, i, TM_MOD);
|
||||
break;
|
||||
case LOP_POWK:
|
||||
translateInstBinaryK(*this, pc, i, TM_POW);
|
||||
break;
|
||||
case LOP_NOT:
|
||||
translateInstNot(*this, pc);
|
||||
break;
|
||||
case LOP_MINUS:
|
||||
translateInstMinus(*this, pc, i);
|
||||
break;
|
||||
case LOP_LENGTH:
|
||||
translateInstLength(*this, pc, i);
|
||||
break;
|
||||
case LOP_NEWTABLE:
|
||||
translateInstNewTable(*this, pc, i);
|
||||
break;
|
||||
case LOP_DUPTABLE:
|
||||
translateInstDupTable(*this, pc, i);
|
||||
break;
|
||||
case LOP_SETLIST:
|
||||
inst(IrCmd::LOP_SETLIST, constUint(i));
|
||||
break;
|
||||
case LOP_GETUPVAL:
|
||||
translateInstGetUpval(*this, pc, i);
|
||||
break;
|
||||
case LOP_SETUPVAL:
|
||||
translateInstSetUpval(*this, pc, i);
|
||||
break;
|
||||
case LOP_CLOSEUPVALS:
|
||||
translateInstCloseUpvals(*this, pc);
|
||||
break;
|
||||
case LOP_FASTCALL:
|
||||
{
|
||||
IrOp fallback = block(IrBlockKind::Fallback);
|
||||
IrOp next = blockAtInst(i + LUAU_INSN_C(*pc) + 2);
|
||||
|
||||
inst(IrCmd::LOP_FASTCALL, constUint(i), fallback);
|
||||
inst(IrCmd::JUMP, next);
|
||||
|
||||
beginBlock(fallback);
|
||||
|
||||
activeFastcallFallback = true;
|
||||
fastcallFallbackReturn = next;
|
||||
break;
|
||||
}
|
||||
case LOP_FASTCALL1:
|
||||
{
|
||||
IrOp fallback = block(IrBlockKind::Fallback);
|
||||
IrOp next = blockAtInst(i + LUAU_INSN_C(*pc) + 2);
|
||||
|
||||
inst(IrCmd::LOP_FASTCALL1, constUint(i), fallback);
|
||||
inst(IrCmd::JUMP, next);
|
||||
|
||||
beginBlock(fallback);
|
||||
|
||||
activeFastcallFallback = true;
|
||||
fastcallFallbackReturn = next;
|
||||
break;
|
||||
}
|
||||
case LOP_FASTCALL2:
|
||||
{
|
||||
IrOp fallback = block(IrBlockKind::Fallback);
|
||||
IrOp next = blockAtInst(i + LUAU_INSN_C(*pc) + 2);
|
||||
|
||||
inst(IrCmd::LOP_FASTCALL2, constUint(i), fallback);
|
||||
inst(IrCmd::JUMP, next);
|
||||
|
||||
beginBlock(fallback);
|
||||
|
||||
activeFastcallFallback = true;
|
||||
fastcallFallbackReturn = next;
|
||||
break;
|
||||
}
|
||||
case LOP_FASTCALL2K:
|
||||
{
|
||||
IrOp fallback = block(IrBlockKind::Fallback);
|
||||
IrOp next = blockAtInst(i + LUAU_INSN_C(*pc) + 2);
|
||||
|
||||
inst(IrCmd::LOP_FASTCALL2K, constUint(i), fallback);
|
||||
inst(IrCmd::JUMP, next);
|
||||
|
||||
beginBlock(fallback);
|
||||
|
||||
activeFastcallFallback = true;
|
||||
fastcallFallbackReturn = next;
|
||||
break;
|
||||
}
|
||||
case LOP_FORNPREP:
|
||||
{
|
||||
IrOp loopExit = blockAtInst(i + 1 + LUAU_INSN_D(*pc));
|
||||
|
||||
inst(IrCmd::LOP_FORNPREP, constUint(i), loopExit);
|
||||
break;
|
||||
}
|
||||
case LOP_FORNLOOP:
|
||||
{
|
||||
IrOp loopRepeat = blockAtInst(i + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp loopExit = blockAtInst(i + getOpLength(LOP_FORNLOOP));
|
||||
|
||||
inst(IrCmd::LOP_FORNLOOP, constUint(i), loopRepeat, loopExit);
|
||||
|
||||
beginBlock(loopExit);
|
||||
break;
|
||||
}
|
||||
case LOP_FORGLOOP:
|
||||
{
|
||||
IrOp loopRepeat = blockAtInst(i + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp loopExit = blockAtInst(i + getOpLength(LOP_FORGLOOP));
|
||||
IrOp fallback = block(IrBlockKind::Fallback);
|
||||
|
||||
inst(IrCmd::LOP_FORGLOOP, constUint(i), loopRepeat, loopExit, fallback);
|
||||
|
||||
beginBlock(fallback);
|
||||
inst(IrCmd::LOP_FORGLOOP_FALLBACK, constUint(i), loopRepeat, loopExit);
|
||||
|
||||
beginBlock(loopExit);
|
||||
break;
|
||||
}
|
||||
case LOP_FORGPREP_NEXT:
|
||||
{
|
||||
IrOp target = blockAtInst(i + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp fallback = block(IrBlockKind::Fallback);
|
||||
|
||||
inst(IrCmd::LOP_FORGPREP_NEXT, constUint(i), target, fallback);
|
||||
|
||||
beginBlock(fallback);
|
||||
inst(IrCmd::LOP_FORGPREP_XNEXT_FALLBACK, constUint(i), target);
|
||||
break;
|
||||
}
|
||||
case LOP_FORGPREP_INEXT:
|
||||
{
|
||||
IrOp target = blockAtInst(i + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp fallback = block(IrBlockKind::Fallback);
|
||||
|
||||
inst(IrCmd::LOP_FORGPREP_INEXT, constUint(i), target, fallback);
|
||||
|
||||
beginBlock(fallback);
|
||||
inst(IrCmd::LOP_FORGPREP_XNEXT_FALLBACK, constUint(i), target);
|
||||
break;
|
||||
}
|
||||
case LOP_AND:
|
||||
inst(IrCmd::LOP_AND, constUint(i));
|
||||
break;
|
||||
case LOP_ANDK:
|
||||
inst(IrCmd::LOP_ANDK, constUint(i));
|
||||
break;
|
||||
case LOP_OR:
|
||||
inst(IrCmd::LOP_OR, constUint(i));
|
||||
break;
|
||||
case LOP_ORK:
|
||||
inst(IrCmd::LOP_ORK, constUint(i));
|
||||
break;
|
||||
case LOP_COVERAGE:
|
||||
inst(IrCmd::LOP_COVERAGE, constUint(i));
|
||||
break;
|
||||
case LOP_GETIMPORT:
|
||||
translateInstGetImport(*this, pc, i);
|
||||
break;
|
||||
case LOP_CONCAT:
|
||||
translateInstConcat(*this, pc, i);
|
||||
break;
|
||||
case LOP_CAPTURE:
|
||||
translateInstCapture(*this, pc, i);
|
||||
break;
|
||||
case LOP_NAMECALL:
|
||||
{
|
||||
IrOp next = blockAtInst(i + getOpLength(LOP_NAMECALL));
|
||||
IrOp fallback = block(IrBlockKind::Fallback);
|
||||
|
||||
inst(IrCmd::LOP_NAMECALL, constUint(i), next, fallback);
|
||||
|
||||
beginBlock(fallback);
|
||||
inst(IrCmd::FALLBACK_NAMECALL, constUint(i));
|
||||
inst(IrCmd::JUMP, next);
|
||||
|
||||
beginBlock(next);
|
||||
break;
|
||||
}
|
||||
case LOP_PREPVARARGS:
|
||||
inst(IrCmd::FALLBACK_PREPVARARGS, constUint(i));
|
||||
break;
|
||||
case LOP_GETVARARGS:
|
||||
inst(IrCmd::FALLBACK_GETVARARGS, constUint(i));
|
||||
break;
|
||||
case LOP_NEWCLOSURE:
|
||||
inst(IrCmd::FALLBACK_NEWCLOSURE, constUint(i));
|
||||
break;
|
||||
case LOP_DUPCLOSURE:
|
||||
inst(IrCmd::FALLBACK_DUPCLOSURE, constUint(i));
|
||||
break;
|
||||
case LOP_FORGPREP:
|
||||
inst(IrCmd::FALLBACK_FORGPREP, constUint(i));
|
||||
break;
|
||||
default:
|
||||
LUAU_ASSERT(!"unknown instruction");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
bool IrBuilder::isInternalBlock(IrOp block)
|
||||
{
|
||||
IrBlock& target = function.blocks[block.index];
|
||||
|
||||
return target.kind == IrBlockKind::Internal;
|
||||
}
|
||||
|
||||
void IrBuilder::beginBlock(IrOp block)
|
||||
{
|
||||
function.blocks[block.index].start = uint32_t(function.instructions.size());
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constBool(bool value)
|
||||
{
|
||||
IrConst constant;
|
||||
constant.kind = IrConstKind::Bool;
|
||||
constant.valueBool = value;
|
||||
return constAny(constant);
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constInt(int value)
|
||||
{
|
||||
IrConst constant;
|
||||
constant.kind = IrConstKind::Int;
|
||||
constant.valueInt = value;
|
||||
return constAny(constant);
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constUint(unsigned value)
|
||||
{
|
||||
IrConst constant;
|
||||
constant.kind = IrConstKind::Uint;
|
||||
constant.valueUint = value;
|
||||
return constAny(constant);
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constDouble(double value)
|
||||
{
|
||||
IrConst constant;
|
||||
constant.kind = IrConstKind::Double;
|
||||
constant.valueDouble = value;
|
||||
return constAny(constant);
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constTag(uint8_t value)
|
||||
{
|
||||
IrConst constant;
|
||||
constant.kind = IrConstKind::Tag;
|
||||
constant.valueTag = value;
|
||||
return constAny(constant);
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constAny(IrConst constant)
|
||||
{
|
||||
uint32_t index = uint32_t(function.constants.size());
|
||||
function.constants.push_back(constant);
|
||||
return {IrOpKind::Constant, index};
|
||||
}
|
||||
|
||||
IrOp IrBuilder::cond(IrCondition cond)
|
||||
{
|
||||
return {IrOpKind::Condition, uint32_t(cond)};
|
||||
}
|
||||
|
||||
IrOp IrBuilder::inst(IrCmd cmd)
|
||||
{
|
||||
return inst(cmd, {}, {}, {}, {}, {});
|
||||
}
|
||||
|
||||
IrOp IrBuilder::inst(IrCmd cmd, IrOp a)
|
||||
{
|
||||
return inst(cmd, a, {}, {}, {}, {});
|
||||
}
|
||||
|
||||
IrOp IrBuilder::inst(IrCmd cmd, IrOp a, IrOp b)
|
||||
{
|
||||
return inst(cmd, a, b, {}, {}, {});
|
||||
}
|
||||
|
||||
IrOp IrBuilder::inst(IrCmd cmd, IrOp a, IrOp b, IrOp c)
|
||||
{
|
||||
return inst(cmd, a, b, c, {}, {});
|
||||
}
|
||||
|
||||
IrOp IrBuilder::inst(IrCmd cmd, IrOp a, IrOp b, IrOp c, IrOp d)
|
||||
{
|
||||
return inst(cmd, a, b, c, d, {});
|
||||
}
|
||||
|
||||
IrOp IrBuilder::inst(IrCmd cmd, IrOp a, IrOp b, IrOp c, IrOp d, IrOp e)
|
||||
{
|
||||
uint32_t index = uint32_t(function.instructions.size());
|
||||
function.instructions.push_back({cmd, a, b, c, d, e});
|
||||
return {IrOpKind::Inst, index};
|
||||
}
|
||||
|
||||
IrOp IrBuilder::block(IrBlockKind kind)
|
||||
{
|
||||
if (kind == IrBlockKind::Internal && activeFastcallFallback)
|
||||
kind = IrBlockKind::Fallback;
|
||||
|
||||
uint32_t index = uint32_t(function.blocks.size());
|
||||
function.blocks.push_back(IrBlock{kind, ~0u});
|
||||
return IrOp{IrOpKind::Block, index};
|
||||
}
|
||||
|
||||
IrOp IrBuilder::blockAtInst(uint32_t index)
|
||||
{
|
||||
uint32_t blockIndex = instIndexToBlock[index];
|
||||
|
||||
if (blockIndex != kNoAssociatedBlockIndex)
|
||||
return IrOp{IrOpKind::Block, blockIndex};
|
||||
|
||||
return block(IrBlockKind::Internal);
|
||||
}
|
||||
|
||||
IrOp IrBuilder::vmReg(uint8_t index)
|
||||
{
|
||||
return {IrOpKind::VmReg, index};
|
||||
}
|
||||
|
||||
IrOp IrBuilder::vmConst(uint32_t index)
|
||||
{
|
||||
return {IrOpKind::VmConst, index};
|
||||
}
|
||||
|
||||
IrOp IrBuilder::vmUpvalue(uint8_t index)
|
||||
{
|
||||
return {IrOpKind::VmUpvalue, index};
|
||||
}
|
||||
|
||||
} // namespace CodeGen
|
||||
} // namespace Luau
|
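The basic-block discovery in rebuildBytecodeBasicBlocks above is a two-step pass: mark every jump target (plus instruction 0), then allocate a bytecode block for each marked index and record it in instIndexToBlock. The sketch below reduces the same idea to a toy one-word instruction format; it is a standalone illustration, not the real Luau bytecode or IR types.

#include <cstdint>
#include <cstdio>
#include <vector>

// Toy instruction: a single word that either jumps to an absolute index or doesn't jump at all.
struct ToyInst
{
    int jumpTarget; // absolute target index, or -1 for "no jump"
};

constexpr uint32_t kNoBlock = ~0u;

int main()
{
    // 0: jump to 3, 1: plain, 2: jump back to 1, 3: plain
    std::vector<ToyInst> code = {{3}, {-1}, {1}, {-1}};

    // Pass 1: mark jump targets; the function entry always starts a block.
    std::vector<uint8_t> isTarget(code.size(), 0);
    isTarget[0] = 1;
    for (const ToyInst& inst : code)
        if (inst.jumpTarget >= 0)
            isTarget[size_t(inst.jumpTarget)] = 1;

    // Pass 2: allocate one block per marked instruction, like instIndexToBlock in the builder.
    std::vector<uint32_t> instIndexToBlock(code.size(), kNoBlock);
    uint32_t nextBlock = 0;
    for (size_t i = 0; i < code.size(); i++)
        if (isTarget[i])
            instIndexToBlock[i] = nextBlock++;

    for (size_t i = 0; i < code.size(); i++)
        std::printf("inst %zu -> block %d\n", i, instIndexToBlock[i] == kNoBlock ? -1 : int(instIndexToBlock[i]));
    return 0;
}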
CodeGen/src/IrBuilder.h (new file, 63 lines)
|
@ -0,0 +1,63 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#pragma once

#include "Luau/Common.h"
#include "Luau/Bytecode.h"

#include "IrData.h"

#include <vector>

struct Proto;
typedef uint32_t Instruction;

namespace Luau
{
namespace CodeGen
{

struct AssemblyOptions;

struct IrBuilder
{
    void buildFunctionIr(Proto* proto);

    void rebuildBytecodeBasicBlocks(Proto* proto);
    void translateInst(LuauOpcode op, const Instruction* pc, int i);

    bool isInternalBlock(IrOp block);
    void beginBlock(IrOp block);

    IrOp constBool(bool value);
    IrOp constInt(int value);
    IrOp constUint(unsigned value);
    IrOp constDouble(double value);
    IrOp constTag(uint8_t value);
    IrOp constAny(IrConst constant);

    IrOp cond(IrCondition cond);

    IrOp inst(IrCmd cmd);
    IrOp inst(IrCmd cmd, IrOp a);
    IrOp inst(IrCmd cmd, IrOp a, IrOp b);
    IrOp inst(IrCmd cmd, IrOp a, IrOp b, IrOp c);
    IrOp inst(IrCmd cmd, IrOp a, IrOp b, IrOp c, IrOp d);
    IrOp inst(IrCmd cmd, IrOp a, IrOp b, IrOp c, IrOp d, IrOp e);

    IrOp block(IrBlockKind kind); // Requested kind can be ignored if we are in an outlined sequence
    IrOp blockAtInst(uint32_t index);

    IrOp vmReg(uint8_t index);
    IrOp vmConst(uint32_t index);
    IrOp vmUpvalue(uint8_t index);

    bool activeFastcallFallback = false;
    IrOp fastcallFallbackReturn;

    IrFunction function;

    std::vector<uint32_t> instIndexToBlock; // Block index at the bytecode instruction
};

} // namespace CodeGen
} // namespace Luau
|
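In the interface above, IrOp is a small handle (an operand kind plus an index), and the const* helpers append an entry to function.constants and hand back such a handle. The stripped-down sketch below shows that handle-into-a-pool pattern with toy types; it is not the real IrData.h definitions.

#include <cstdint>
#include <cstdio>
#include <vector>

enum class ToyOpKind : uint8_t
{
    None,
    Constant,
};

struct ToyOp
{
    ToyOpKind kind;
    uint32_t index; // index into whatever pool the kind refers to
};

struct ToyConstPool
{
    std::vector<double> constants;

    ToyOp constDouble(double value)
    {
        uint32_t index = uint32_t(constants.size());
        constants.push_back(value); // handles stay valid because they store an index, not a pointer
        return {ToyOpKind::Constant, index};
    }
};

int main()
{
    ToyConstPool pool;
    ToyOp a = pool.constDouble(1.5);
    ToyOp b = pool.constDouble(2.5);

    std::printf("a -> constants[%u] = %g\n", a.index, pool.constants[a.index]);
    std::printf("b -> constants[%u] = %g\n", b.index, pool.constants[b.index]);
    return 0;
}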
|
@ -9,6 +9,8 @@
|
|||
|
||||
#include <stdint.h>
|
||||
|
||||
struct Proto;
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
namespace CodeGen
|
||||
|
@ -99,6 +101,7 @@ enum class IrCmd : uint8_t
|
|||
|
||||
// Operations that don't have an IR representation yet
|
||||
LOP_SETLIST,
|
||||
LOP_NAMECALL,
|
||||
LOP_CALL,
|
||||
LOP_RETURN,
|
||||
LOP_FASTCALL,
|
||||
|
@ -116,21 +119,21 @@ enum class IrCmd : uint8_t
|
|||
LOP_ANDK,
|
||||
LOP_OR,
|
||||
LOP_ORK,
|
||||
LOP_COVERAGE,
|
||||
|
||||
// Operations that have a translation, but use a full instruction fallback
|
||||
FALLBACK_GETGLOBAL,
|
||||
FALLBACK_SETGLOBAL,
|
||||
FALLBACK_GETTABLEKS,
|
||||
FALLBACK_SETTABLEKS,
|
||||
FALLBACK_NAMECALL,
|
||||
|
||||
// Operations that don't have assembly lowering at all
|
||||
FALLBACK_NAMECALL,
|
||||
FALLBACK_PREPVARARGS,
|
||||
FALLBACK_GETVARARGS,
|
||||
FALLBACK_NEWCLOSURE,
|
||||
FALLBACK_DUPCLOSURE,
|
||||
FALLBACK_FORGPREP,
|
||||
FALLBACK_COVERAGE,
|
||||
};
|
||||
|
||||
enum class IrConstKind : uint8_t
|
||||
|
@ -274,6 +277,8 @@ struct IrFunction
|
|||
std::vector<IrConst> constants;
|
||||
|
||||
std::vector<BytecodeMapping> bcMapping;
|
||||
|
||||
Proto* proto = nullptr;
|
||||
};
|
||||
|
||||
} // namespace CodeGen
|
||||
|
|
|
@ -186,6 +186,8 @@ const char* getCmdName(IrCmd cmd)
|
|||
return "CAPTURE";
|
||||
case IrCmd::LOP_SETLIST:
|
||||
return "LOP_SETLIST";
|
||||
case IrCmd::LOP_NAMECALL:
|
||||
return "LOP_NAMECALL";
|
||||
case IrCmd::LOP_CALL:
|
||||
return "LOP_CALL";
|
||||
case IrCmd::LOP_RETURN:
|
||||
|
@ -220,6 +222,8 @@ const char* getCmdName(IrCmd cmd)
|
|||
return "LOP_OR";
|
||||
case IrCmd::LOP_ORK:
|
||||
return "LOP_ORK";
|
||||
case IrCmd::LOP_COVERAGE:
|
||||
return "LOP_COVERAGE";
|
||||
case IrCmd::FALLBACK_GETGLOBAL:
|
||||
return "FALLBACK_GETGLOBAL";
|
||||
case IrCmd::FALLBACK_SETGLOBAL:
|
||||
|
@ -240,8 +244,6 @@ const char* getCmdName(IrCmd cmd)
|
|||
return "FALLBACK_DUPCLOSURE";
|
||||
case IrCmd::FALLBACK_FORGPREP:
|
||||
return "FALLBACK_FORGPREP";
|
||||
case IrCmd::FALLBACK_COVERAGE:
|
||||
return "FALLBACK_COVERAGE";
|
||||
}
|
||||
|
||||
LUAU_UNREACHABLE();
|
||||
|
@ -375,5 +377,48 @@ void toStringDetailed(IrToStringContext& ctx, IrInst inst, uint32_t index)
|
|||
append(ctx.result, "; useCount: %d, lastUse: %%%u\n", inst.useCount, inst.lastUse);
|
||||
}
|
||||
|
||||
std::string dump(IrFunction& function)
|
||||
{
|
||||
std::string result;
|
||||
IrToStringContext ctx{result, function.blocks, function.constants};
|
||||
|
||||
for (size_t i = 0; i < function.blocks.size(); i++)
|
||||
{
|
||||
IrBlock& block = function.blocks[i];
|
||||
|
||||
append(ctx.result, "%s_%u:\n", getBlockKindName(block.kind), unsigned(i));
|
||||
|
||||
if (block.start == ~0u)
|
||||
{
|
||||
append(ctx.result, " *empty*\n\n");
|
||||
continue;
|
||||
}
|
||||
|
||||
for (uint32_t index = block.start; true; index++)
|
||||
{
|
||||
LUAU_ASSERT(index < function.instructions.size());
|
||||
|
||||
IrInst& inst = function.instructions[index];
|
||||
|
||||
// Nop is used to replace dead instructions in-place, so it's not that useful to see them
|
||||
if (inst.cmd == IrCmd::NOP)
|
||||
continue;
|
||||
|
||||
append(ctx.result, " ");
|
||||
toStringDetailed(ctx, inst, index);
|
||||
|
||||
if (isBlockTerminator(inst.cmd))
|
||||
{
|
||||
append(ctx.result, "\n");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
printf("%s\n", result.c_str());
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
} // namespace CodeGen
|
||||
} // namespace Luau
|
||||
|
|
|
@ -28,5 +28,7 @@ void toString(std::string& result, IrConst constant);
|
|||
|
||||
void toStringDetailed(IrToStringContext& ctx, IrInst inst, uint32_t index);
|
||||
|
||||
std::string dump(IrFunction& function);
|
||||
|
||||
} // namespace CodeGen
|
||||
} // namespace Luau
|
||||
|
|
CodeGen/src/IrTranslation.cpp (new file, 780 lines)
|
@ -0,0 +1,780 @@
|
|||
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||
#include "IrTranslation.h"
|
||||
|
||||
#include "Luau/Bytecode.h"
|
||||
|
||||
#include "IrBuilder.h"
|
||||
|
||||
#include "lobject.h"
|
||||
#include "ltm.h"
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
namespace CodeGen
|
||||
{
|
||||
|
||||
// Helper to consistently define a switch to instruction fallback code
|
||||
struct FallbackStreamScope
|
||||
{
|
||||
FallbackStreamScope(IrBuilder& build, IrOp fallback, IrOp next)
|
||||
: build(build)
|
||||
, next(next)
|
||||
{
|
||||
LUAU_ASSERT(fallback.kind == IrOpKind::Block);
|
||||
LUAU_ASSERT(next.kind == IrOpKind::Block);
|
||||
|
||||
build.inst(IrCmd::JUMP, next);
|
||||
build.beginBlock(fallback);
|
||||
}
|
||||
|
||||
~FallbackStreamScope()
|
||||
{
|
||||
build.beginBlock(next);
|
||||
}
|
||||
|
||||
IrBuilder& build;
|
||||
IrOp next;
|
||||
};
|
||||
|
||||
void translateInstLoadNil(IrBuilder& build, const Instruction* pc)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNIL));
|
||||
}
|
||||
|
||||
void translateInstLoadB(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
|
||||
build.inst(IrCmd::STORE_INT, build.vmReg(ra), build.constInt(LUAU_INSN_B(*pc)));
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TBOOLEAN));
|
||||
|
||||
if (int target = LUAU_INSN_C(*pc))
|
||||
build.inst(IrCmd::JUMP, build.blockAtInst(pcpos + 1 + target));
|
||||
}
|
||||
|
||||
void translateInstLoadN(IrBuilder& build, const Instruction* pc)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
|
||||
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), build.constDouble(double(LUAU_INSN_D(*pc))));
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
|
||||
}
|
||||
|
||||
void translateInstLoadK(IrBuilder& build, const Instruction* pc)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
|
||||
// TODO: per-component loads and stores might be preferable
|
||||
IrOp load = build.inst(IrCmd::LOAD_TVALUE, build.vmConst(LUAU_INSN_D(*pc)));
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), load);
|
||||
}
|
||||
|
||||
void translateInstLoadKX(IrBuilder& build, const Instruction* pc)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
uint32_t aux = pc[1];
|
||||
|
||||
// TODO: per-component loads and stores might be preferable
|
||||
IrOp load = build.inst(IrCmd::LOAD_TVALUE, build.vmConst(aux));
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), load);
|
||||
}
|
||||
|
||||
void translateInstMove(IrBuilder& build, const Instruction* pc)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = LUAU_INSN_B(*pc);
|
||||
|
||||
// TODO: per-component loads and stores might be preferable
|
||||
IrOp load = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(rb));
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), load);
|
||||
}
|
||||
|
||||
void translateInstJump(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
build.inst(IrCmd::JUMP, build.blockAtInst(pcpos + 1 + LUAU_INSN_D(*pc)));
|
||||
}
|
||||
|
||||
void translateInstJumpBack(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
build.inst(IrCmd::INTERRUPT, build.constUint(pcpos));
|
||||
build.inst(IrCmd::JUMP, build.blockAtInst(pcpos + 1 + LUAU_INSN_D(*pc)));
|
||||
}
|
||||
|
||||
void translateInstJumpIf(IrBuilder& build, const Instruction* pc, int pcpos, bool not_)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
|
||||
IrOp target = build.blockAtInst(pcpos + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp next = build.blockAtInst(pcpos + 1);
|
||||
|
||||
// TODO: falsy/truthy conditions should be deconstructed into more primitive operations
|
||||
if (not_)
|
||||
build.inst(IrCmd::JUMP_IF_FALSY, build.vmReg(ra), target, next);
|
||||
else
|
||||
build.inst(IrCmd::JUMP_IF_TRUTHY, build.vmReg(ra), target, next);
|
||||
|
||||
// Fallthrough in original bytecode is implicit, so we start next internal block here
|
||||
if (build.isInternalBlock(next))
|
||||
build.beginBlock(next);
|
||||
}
|
||||
|
||||
void translateInstJumpIfEq(IrBuilder& build, const Instruction* pc, int pcpos, bool not_)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = pc[1];
|
||||
|
||||
IrOp target = build.blockAtInst(pcpos + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp next = build.blockAtInst(pcpos + 2);
|
||||
IrOp numberCheck = build.block(IrBlockKind::Internal);
|
||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||
|
||||
IrOp ta = build.inst(IrCmd::LOAD_TAG, build.vmReg(ra));
|
||||
IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
|
||||
build.inst(IrCmd::JUMP_EQ_TAG, ta, tb, numberCheck, not_ ? target : next);
|
||||
|
||||
build.beginBlock(numberCheck);
|
||||
|
||||
// fast-path: number
|
||||
build.inst(IrCmd::CHECK_TAG, ta, build.constTag(LUA_TNUMBER), fallback);
|
||||
|
||||
IrOp va = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(ra));
|
||||
IrOp vb = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(rb));
|
||||
|
||||
build.inst(IrCmd::JUMP_CMP_NUM, va, vb, build.cond(IrCondition::NotEqual), not_ ? target : next, not_ ? next : target);
|
||||
|
||||
FallbackStreamScope scope(build, fallback, next);
|
||||
|
||||
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
|
||||
build.inst(IrCmd::JUMP_CMP_ANY, build.vmReg(ra), build.vmReg(rb), build.cond(not_ ? IrCondition::NotEqual : IrCondition::Equal), target, next);
|
||||
}
|
||||
|
||||
void translateInstJumpIfCond(IrBuilder& build, const Instruction* pc, int pcpos, IrCondition cond)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = pc[1];
|
||||
|
||||
IrOp target = build.blockAtInst(pcpos + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp next = build.blockAtInst(pcpos + 2);
|
||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||
|
||||
// fast-path: number
|
||||
IrOp ta = build.inst(IrCmd::LOAD_TAG, build.vmReg(ra));
|
||||
build.inst(IrCmd::CHECK_TAG, ta, build.constTag(LUA_TNUMBER), fallback);
|
||||
|
||||
IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
|
||||
build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TNUMBER), fallback);
|
||||
|
||||
IrOp va = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(ra));
|
||||
IrOp vb = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(rb));
|
||||
|
||||
build.inst(IrCmd::JUMP_CMP_NUM, va, vb, build.cond(cond), target, next);
|
||||
|
||||
FallbackStreamScope scope(build, fallback, next);
|
||||
|
||||
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
|
||||
build.inst(IrCmd::JUMP_CMP_ANY, build.vmReg(ra), build.vmReg(rb), build.cond(cond), target, next);
|
||||
}
|
||||
|
||||
void translateInstJumpX(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
build.inst(IrCmd::INTERRUPT, build.constUint(pcpos));
|
||||
build.inst(IrCmd::JUMP, build.blockAtInst(pcpos + 1 + LUAU_INSN_E(*pc)));
|
||||
}
|
||||
|
||||
void translateInstJumpxEqNil(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
bool not_ = (pc[1] & 0x80000000) != 0;
|
||||
|
||||
IrOp target = build.blockAtInst(pcpos + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp next = build.blockAtInst(pcpos + 2);
|
||||
|
||||
IrOp ta = build.inst(IrCmd::LOAD_TAG, build.vmReg(ra));
|
||||
build.inst(IrCmd::JUMP_EQ_TAG, ta, build.constTag(LUA_TNIL), not_ ? next : target, not_ ? target : next);
|
||||
|
||||
// Fallthrough in original bytecode is implicit, so we start next internal block here
|
||||
if (build.isInternalBlock(next))
|
||||
build.beginBlock(next);
|
||||
}
|
||||
|
||||
void translateInstJumpxEqB(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
uint32_t aux = pc[1];
|
||||
bool not_ = (aux & 0x80000000) != 0;
|
||||
|
||||
IrOp target = build.blockAtInst(pcpos + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp next = build.blockAtInst(pcpos + 2);
|
||||
IrOp checkValue = build.block(IrBlockKind::Internal);
|
||||
|
||||
IrOp ta = build.inst(IrCmd::LOAD_TAG, build.vmReg(ra));
|
||||
|
||||
build.inst(IrCmd::JUMP_EQ_TAG, ta, build.constTag(LUA_TBOOLEAN), checkValue, not_ ? target : next);
|
||||
|
||||
build.beginBlock(checkValue);
|
||||
IrOp va = build.inst(IrCmd::LOAD_INT, build.vmReg(ra));
|
||||
|
||||
build.inst(IrCmd::JUMP_EQ_BOOLEAN, va, build.constBool(aux & 0x1), not_ ? next : target, not_ ? target : next);
|
||||
|
||||
// Fallthrough in original bytecode is implicit, so we start next internal block here
|
||||
if (build.isInternalBlock(next))
|
||||
build.beginBlock(next);
|
||||
}
|
||||
|
||||
void translateInstJumpxEqN(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
uint32_t aux = pc[1];
|
||||
bool not_ = (aux & 0x80000000) != 0;
|
||||
|
||||
IrOp target = build.blockAtInst(pcpos + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp next = build.blockAtInst(pcpos + 2);
|
||||
IrOp checkValue = build.block(IrBlockKind::Internal);
|
||||
|
||||
IrOp ta = build.inst(IrCmd::LOAD_TAG, build.vmReg(ra));
|
||||
|
||||
build.inst(IrCmd::JUMP_EQ_TAG, ta, build.constTag(LUA_TNUMBER), checkValue, not_ ? target : next);
|
||||
|
||||
build.beginBlock(checkValue);
|
||||
IrOp va = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(ra));
|
||||
IrOp vb = build.inst(IrCmd::LOAD_DOUBLE, build.vmConst(aux & 0xffffff));
|
||||
|
||||
build.inst(IrCmd::JUMP_CMP_NUM, va, vb, build.cond(IrCondition::NotEqual), not_ ? target : next, not_ ? next : target);
|
||||
|
||||
// Fallthrough in original bytecode is implicit, so we start next internal block here
|
||||
if (build.isInternalBlock(next))
|
||||
build.beginBlock(next);
|
||||
}
|
||||
|
||||
void translateInstJumpxEqS(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
uint32_t aux = pc[1];
|
||||
bool not_ = (aux & 0x80000000) != 0;
|
||||
|
||||
IrOp target = build.blockAtInst(pcpos + 1 + LUAU_INSN_D(*pc));
|
||||
IrOp next = build.blockAtInst(pcpos + 2);
|
||||
IrOp checkValue = build.block(IrBlockKind::Internal);
|
||||
|
||||
IrOp ta = build.inst(IrCmd::LOAD_TAG, build.vmReg(ra));
|
||||
build.inst(IrCmd::JUMP_EQ_TAG, ta, build.constTag(LUA_TSTRING), checkValue, not_ ? target : next);
|
||||
|
||||
build.beginBlock(checkValue);
|
||||
IrOp va = build.inst(IrCmd::LOAD_POINTER, build.vmReg(ra));
|
||||
IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmConst(aux & 0xffffff));
|
||||
|
||||
build.inst(IrCmd::JUMP_EQ_POINTER, va, vb, not_ ? next : target, not_ ? target : next);
|
||||
|
||||
// Fallthrough in original bytecode is implicit, so we start next internal block here
|
||||
if (build.isInternalBlock(next))
|
||||
build.beginBlock(next);
|
||||
}
|
||||
|
||||
static void translateInstBinaryNumeric(IrBuilder& build, int ra, int rb, int rc, IrOp opc, int pcpos, TMS tm)
|
||||
{
|
||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||
|
||||
// fast-path: number
|
||||
IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
|
||||
build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TNUMBER), fallback);
|
||||
|
||||
if (rc != -1 && rc != rb) // TODO: optimization should handle second check, but we'll test it later
|
||||
{
|
||||
IrOp tc = build.inst(IrCmd::LOAD_TAG, build.vmReg(rc));
|
||||
build.inst(IrCmd::CHECK_TAG, tc, build.constTag(LUA_TNUMBER), fallback);
|
||||
}
|
||||
|
||||
IrOp vb = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(rb));
|
||||
IrOp vc = build.inst(IrCmd::LOAD_DOUBLE, opc);
|
||||
|
||||
IrOp va;
|
||||
|
||||
switch (tm)
|
||||
{
|
||||
case TM_ADD:
|
||||
va = build.inst(IrCmd::ADD_NUM, vb, vc);
|
||||
break;
|
||||
case TM_SUB:
|
||||
va = build.inst(IrCmd::SUB_NUM, vb, vc);
|
||||
break;
|
||||
case TM_MUL:
|
||||
va = build.inst(IrCmd::MUL_NUM, vb, vc);
|
||||
break;
|
||||
case TM_DIV:
|
||||
va = build.inst(IrCmd::DIV_NUM, vb, vc);
|
||||
break;
|
||||
case TM_MOD:
|
||||
va = build.inst(IrCmd::MOD_NUM, vb, vc);
|
||||
break;
|
||||
case TM_POW:
|
||||
va = build.inst(IrCmd::POW_NUM, vb, vc);
|
||||
break;
|
||||
default:
|
||||
LUAU_ASSERT(!"unsupported binary op");
|
||||
}
|
||||
|
||||
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), va);
|
||||
|
||||
if (ra != rb && ra != rc) // TODO: optimization should handle second check, but we'll test this later
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
|
||||
|
||||
IrOp next = build.blockAtInst(pcpos + 1);
|
||||
FallbackStreamScope scope(build, fallback, next);
|
||||
|
||||
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
|
||||
build.inst(IrCmd::DO_ARITH, build.vmReg(ra), build.vmReg(rb), opc, build.constInt(tm));
|
||||
build.inst(IrCmd::JUMP, next);
|
||||
}
|
||||
|
||||
void translateInstBinary(IrBuilder& build, const Instruction* pc, int pcpos, TMS tm)
|
||||
{
|
||||
translateInstBinaryNumeric(build, LUAU_INSN_A(*pc), LUAU_INSN_B(*pc), LUAU_INSN_C(*pc), build.vmReg(LUAU_INSN_C(*pc)), pcpos, tm);
|
||||
}
|
||||
|
||||
void translateInstBinaryK(IrBuilder& build, const Instruction* pc, int pcpos, TMS tm)
|
||||
{
|
||||
translateInstBinaryNumeric(build, LUAU_INSN_A(*pc), LUAU_INSN_B(*pc), -1, build.vmConst(LUAU_INSN_C(*pc)), pcpos, tm);
|
||||
}
|
||||
|
||||
void translateInstNot(IrBuilder& build, const Instruction* pc)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = LUAU_INSN_B(*pc);
|
||||
|
||||
IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
|
||||
IrOp vb = build.inst(IrCmd::LOAD_INT, build.vmReg(rb));
|
||||
|
||||
IrOp va = build.inst(IrCmd::NOT_ANY, tb, vb);
|
||||
|
||||
build.inst(IrCmd::STORE_INT, build.vmReg(ra), va);
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TBOOLEAN));
|
||||
}
|
||||
|
||||
void translateInstMinus(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = LUAU_INSN_B(*pc);
|
||||
|
||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||
|
||||
IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
|
||||
build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TNUMBER), fallback);
|
||||
|
||||
// fast-path: number
|
||||
IrOp vb = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(rb));
|
||||
IrOp va = build.inst(IrCmd::UNM_NUM, vb);
|
||||
|
||||
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), va);
|
||||
|
||||
if (ra != rb)
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
|
||||
|
||||
IrOp next = build.blockAtInst(pcpos + 1);
|
||||
FallbackStreamScope scope(build, fallback, next);
|
||||
|
||||
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
|
||||
build.inst(IrCmd::DO_ARITH, build.vmReg(LUAU_INSN_A(*pc)), build.vmReg(LUAU_INSN_B(*pc)), build.vmReg(LUAU_INSN_B(*pc)), build.constInt(TM_UNM));
|
||||
build.inst(IrCmd::JUMP, next);
|
||||
}
|
||||
|
||||
void translateInstLength(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = LUAU_INSN_B(*pc);
|
||||
|
||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||
|
||||
IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
|
||||
build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TTABLE), fallback);
|
||||
|
||||
// fast-path: table without __len
|
||||
IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
|
||||
build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);
|
||||
|
||||
IrOp va = build.inst(IrCmd::TABLE_LEN, vb);
|
||||
|
||||
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), va);
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
|
||||
|
||||
IrOp next = build.blockAtInst(pcpos + 1);
|
||||
FallbackStreamScope scope(build, fallback, next);
|
||||
|
||||
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
|
||||
build.inst(IrCmd::DO_LEN, build.vmReg(LUAU_INSN_A(*pc)), build.vmReg(LUAU_INSN_B(*pc)));
|
||||
build.inst(IrCmd::JUMP, next);
|
||||
}
|
||||
|
||||
void translateInstNewTable(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int b = LUAU_INSN_B(*pc);
|
||||
uint32_t aux = pc[1];
|
||||
|
||||
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
|
||||
|
||||
IrOp va = build.inst(IrCmd::NEW_TABLE, build.constUint(aux), build.constUint(b == 0 ? 0 : 1 << (b - 1)));
|
||||
build.inst(IrCmd::STORE_POINTER, build.vmReg(ra), va);
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TTABLE));
|
||||
|
||||
build.inst(IrCmd::CHECK_GC);
|
||||
}
|
||||
|
||||
void translateInstDupTable(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int k = LUAU_INSN_D(*pc);
|
||||
|
||||
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
|
||||
|
||||
IrOp table = build.inst(IrCmd::LOAD_POINTER, build.vmConst(k));
|
||||
IrOp va = build.inst(IrCmd::DUP_TABLE, table);
|
||||
build.inst(IrCmd::STORE_POINTER, build.vmReg(ra), va);
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TTABLE));
|
||||
|
||||
build.inst(IrCmd::CHECK_GC);
|
||||
}
|
||||
|
||||
void translateInstGetUpval(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int up = LUAU_INSN_B(*pc);
|
||||
|
||||
build.inst(IrCmd::GET_UPVALUE, build.vmReg(ra), build.vmUpvalue(up));
|
||||
}
|
||||
|
||||
void translateInstSetUpval(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int up = LUAU_INSN_B(*pc);
|
||||
|
||||
build.inst(IrCmd::SET_UPVALUE, build.vmUpvalue(up), build.vmReg(ra));
|
||||
}
|
||||
|
||||
void translateInstCloseUpvals(IrBuilder& build, const Instruction* pc)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
|
||||
build.inst(IrCmd::CLOSE_UPVALS, build.vmReg(ra));
|
||||
}
|
||||
|
||||
void translateInstGetTableN(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = LUAU_INSN_B(*pc);
|
||||
int c = LUAU_INSN_C(*pc);
|
||||
|
||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||
|
||||
IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
|
||||
build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TTABLE), fallback);
|
||||
|
||||
IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
|
||||
|
||||
build.inst(IrCmd::CHECK_ARRAY_SIZE, vb, build.constUint(c), fallback);
|
||||
build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);
|
||||
|
||||
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constUint(c));
|
||||
|
||||
// TODO: per-component loads and stores might be preferable
|
||||
IrOp arrElTval = build.inst(IrCmd::LOAD_TVALUE, arrEl);
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), arrElTval);
|
||||
|
||||
IrOp next = build.blockAtInst(pcpos + 1);
|
||||
FallbackStreamScope scope(build, fallback, next);
|
||||
|
||||
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
|
||||
build.inst(IrCmd::GET_TABLE, build.vmReg(ra), build.vmReg(rb), build.constUint(c + 1));
|
||||
build.inst(IrCmd::JUMP, next);
|
||||
}
|
||||
|
||||
void translateInstSetTableN(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = LUAU_INSN_B(*pc);
|
||||
int c = LUAU_INSN_C(*pc);
|
||||
|
||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||
|
||||
IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
|
||||
build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TTABLE), fallback);
|
||||
|
||||
IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
|
||||
|
||||
build.inst(IrCmd::CHECK_ARRAY_SIZE, vb, build.constUint(c), fallback);
|
||||
build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);
|
||||
build.inst(IrCmd::CHECK_READONLY, vb, fallback);
|
||||
|
||||
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constUint(c));
|
||||
|
||||
// TODO: per-component loads and stores might be preferable
|
||||
IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
|
||||
build.inst(IrCmd::STORE_TVALUE, arrEl, tva);
|
||||
|
||||
build.inst(IrCmd::BARRIER_TABLE_FORWARD, vb, build.vmReg(ra));
|
||||
|
||||
IrOp next = build.blockAtInst(pcpos + 1);
|
||||
FallbackStreamScope scope(build, fallback, next);
|
||||
|
||||
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
|
||||
build.inst(IrCmd::SET_TABLE, build.vmReg(ra), build.vmReg(rb), build.constUint(c + 1));
|
||||
build.inst(IrCmd::JUMP, next);
|
||||
}
|
||||
|
||||
void translateInstGetTable(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = LUAU_INSN_B(*pc);
|
||||
int rc = LUAU_INSN_C(*pc);
|
||||
|
||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||
|
||||
IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
|
||||
build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TTABLE), fallback);
|
||||
IrOp tc = build.inst(IrCmd::LOAD_TAG, build.vmReg(rc));
|
||||
build.inst(IrCmd::CHECK_TAG, tc, build.constTag(LUA_TNUMBER), fallback);
|
||||
|
||||
// fast-path: table with a number index
|
||||
    IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
    IrOp vc = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(rc));

    IrOp index = build.inst(IrCmd::NUM_TO_INDEX, vc, fallback);

    index = build.inst(IrCmd::SUB_INT, index, build.constInt(1));

    build.inst(IrCmd::CHECK_ARRAY_SIZE, vb, index, fallback);
    build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);

    IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, index);

    // TODO: per-component loads and stores might be preferable
    IrOp arrElTval = build.inst(IrCmd::LOAD_TVALUE, arrEl);
    build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), arrElTval);

    IrOp next = build.blockAtInst(pcpos + 1);
    FallbackStreamScope scope(build, fallback, next);

    build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
    build.inst(IrCmd::GET_TABLE, build.vmReg(ra), build.vmReg(rb), build.vmReg(rc));
    build.inst(IrCmd::JUMP, next);
}

void translateInstSetTable(IrBuilder& build, const Instruction* pc, int pcpos)
{
    int ra = LUAU_INSN_A(*pc);
    int rb = LUAU_INSN_B(*pc);
    int rc = LUAU_INSN_C(*pc);

    IrOp fallback = build.block(IrBlockKind::Fallback);

    IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
    build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TTABLE), fallback);
    IrOp tc = build.inst(IrCmd::LOAD_TAG, build.vmReg(rc));
    build.inst(IrCmd::CHECK_TAG, tc, build.constTag(LUA_TNUMBER), fallback);

    // fast-path: table with a number index
    IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
    IrOp vc = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(rc));

    IrOp index = build.inst(IrCmd::NUM_TO_INDEX, vc, fallback);

    index = build.inst(IrCmd::SUB_INT, index, build.constInt(1));

    build.inst(IrCmd::CHECK_ARRAY_SIZE, vb, index, fallback);
    build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);
    build.inst(IrCmd::CHECK_READONLY, vb, fallback);

    IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, index);

    // TODO: per-component loads and stores might be preferable
    IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
    build.inst(IrCmd::STORE_TVALUE, arrEl, tva);

    build.inst(IrCmd::BARRIER_TABLE_FORWARD, vb, build.vmReg(ra));

    IrOp next = build.blockAtInst(pcpos + 1);
    FallbackStreamScope scope(build, fallback, next);

    build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
    build.inst(IrCmd::SET_TABLE, build.vmReg(ra), build.vmReg(rb), build.vmReg(rc));
    build.inst(IrCmd::JUMP, next);
}

void translateInstGetImport(IrBuilder& build, const Instruction* pc, int pcpos)
{
    int ra = LUAU_INSN_A(*pc);
    int k = LUAU_INSN_D(*pc);
    uint32_t aux = pc[1];

    IrOp fastPath = build.block(IrBlockKind::Internal);
    IrOp fallback = build.block(IrBlockKind::Fallback);

    build.inst(IrCmd::CHECK_SAFE_ENV, fallback);

    // note: if import failed, k[] is nil; we could check this during codegen, but we instead use runtime fallback
    // this allows us to handle ahead-of-time codegen smoothly when an import fails to resolve at runtime
    IrOp tk = build.inst(IrCmd::LOAD_TAG, build.vmConst(k));
    build.inst(IrCmd::JUMP_EQ_TAG, tk, build.constTag(LUA_TNIL), fallback, fastPath);

    build.beginBlock(fastPath);

    // TODO: per-component loads and stores might be preferable
    IrOp tvk = build.inst(IrCmd::LOAD_TVALUE, build.vmConst(k));
    build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), tvk);

    IrOp next = build.blockAtInst(pcpos + 2);
    FallbackStreamScope scope(build, fallback, next);

    build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
    build.inst(IrCmd::GET_IMPORT, build.vmReg(ra), build.constUint(aux));
    build.inst(IrCmd::JUMP, next);
}

void translateInstGetTableKS(IrBuilder& build, const Instruction* pc, int pcpos)
{
    int ra = LUAU_INSN_A(*pc);
    int rb = LUAU_INSN_B(*pc);
    uint32_t aux = pc[1];

    IrOp fallback = build.block(IrBlockKind::Fallback);

    IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
    build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TTABLE), fallback);

    IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));

    IrOp addrSlotEl = build.inst(IrCmd::GET_SLOT_NODE_ADDR, vb, build.constUint(pcpos));

    build.inst(IrCmd::CHECK_SLOT_MATCH, addrSlotEl, build.vmConst(aux), fallback);

    // TODO: per-component loads and stores might be preferable
    IrOp tvn = build.inst(IrCmd::LOAD_NODE_VALUE_TV, addrSlotEl);
    build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), tvn);

    IrOp next = build.blockAtInst(pcpos + 2);
    FallbackStreamScope scope(build, fallback, next);

    build.inst(IrCmd::FALLBACK_GETTABLEKS, build.constUint(pcpos));
    build.inst(IrCmd::JUMP, next);
}

void translateInstSetTableKS(IrBuilder& build, const Instruction* pc, int pcpos)
{
    int ra = LUAU_INSN_A(*pc);
    int rb = LUAU_INSN_B(*pc);
    uint32_t aux = pc[1];

    IrOp fallback = build.block(IrBlockKind::Fallback);

    IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
    build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TTABLE), fallback);

    IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));

    IrOp addrSlotEl = build.inst(IrCmd::GET_SLOT_NODE_ADDR, vb, build.constUint(pcpos));

    build.inst(IrCmd::CHECK_SLOT_MATCH, addrSlotEl, build.vmConst(aux), fallback);
    build.inst(IrCmd::CHECK_READONLY, vb, fallback);

    // TODO: per-component loads and stores might be preferable
    IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
    build.inst(IrCmd::STORE_NODE_VALUE_TV, addrSlotEl, tva);

    build.inst(IrCmd::BARRIER_TABLE_FORWARD, vb, build.vmReg(ra));

    IrOp next = build.blockAtInst(pcpos + 2);
    FallbackStreamScope scope(build, fallback, next);

    build.inst(IrCmd::FALLBACK_SETTABLEKS, build.constUint(pcpos));
    build.inst(IrCmd::JUMP, next);
}

void translateInstGetGlobal(IrBuilder& build, const Instruction* pc, int pcpos)
{
    int ra = LUAU_INSN_A(*pc);
    uint32_t aux = pc[1];

    IrOp fallback = build.block(IrBlockKind::Fallback);

    IrOp env = build.inst(IrCmd::LOAD_ENV);
    IrOp addrSlotEl = build.inst(IrCmd::GET_SLOT_NODE_ADDR, env, build.constUint(pcpos));

    build.inst(IrCmd::CHECK_SLOT_MATCH, addrSlotEl, build.vmConst(aux), fallback);

    // TODO: per-component loads and stores might be preferable
    IrOp tvn = build.inst(IrCmd::LOAD_NODE_VALUE_TV, addrSlotEl);
    build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), tvn);

    IrOp next = build.blockAtInst(pcpos + 2);
    FallbackStreamScope scope(build, fallback, next);

    build.inst(IrCmd::FALLBACK_GETGLOBAL, build.constUint(pcpos));
    build.inst(IrCmd::JUMP, next);
}

void translateInstSetGlobal(IrBuilder& build, const Instruction* pc, int pcpos)
{
    int ra = LUAU_INSN_A(*pc);
    uint32_t aux = pc[1];

    IrOp fallback = build.block(IrBlockKind::Fallback);

    IrOp env = build.inst(IrCmd::LOAD_ENV);
    IrOp addrSlotEl = build.inst(IrCmd::GET_SLOT_NODE_ADDR, env, build.constUint(pcpos));

    build.inst(IrCmd::CHECK_SLOT_MATCH, addrSlotEl, build.vmConst(aux), fallback);
    build.inst(IrCmd::CHECK_READONLY, env, fallback);

    // TODO: per-component loads and stores might be preferable
    IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
    build.inst(IrCmd::STORE_NODE_VALUE_TV, addrSlotEl, tva);

    build.inst(IrCmd::BARRIER_TABLE_FORWARD, env, build.vmReg(ra));

    IrOp next = build.blockAtInst(pcpos + 2);
    FallbackStreamScope scope(build, fallback, next);

    build.inst(IrCmd::FALLBACK_SETGLOBAL, build.constUint(pcpos));
    build.inst(IrCmd::JUMP, next);
}

void translateInstConcat(IrBuilder& build, const Instruction* pc, int pcpos)
{
    int ra = LUAU_INSN_A(*pc);
    int rb = LUAU_INSN_B(*pc);
    int rc = LUAU_INSN_C(*pc);

    build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
    build.inst(IrCmd::CONCAT, build.constUint(rc - rb + 1), build.constUint(rc));

    // TODO: per-component loads and stores might be preferable
    IrOp tvb = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(rb));
    build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), tvb);

    build.inst(IrCmd::CHECK_GC);
}

void translateInstCapture(IrBuilder& build, const Instruction* pc, int pcpos)
{
    int type = LUAU_INSN_A(*pc);
    int index = LUAU_INSN_B(*pc);

    switch (type)
    {
    case LCT_VAL:
        build.inst(IrCmd::CAPTURE, build.vmReg(index), build.constBool(false));
        break;
    case LCT_REF:
        build.inst(IrCmd::CAPTURE, build.vmReg(index), build.constBool(true));
        break;
    case LCT_UPVAL:
        build.inst(IrCmd::CAPTURE, build.vmUpvalue(index), build.constBool(false));
        break;
    default:
        LUAU_ASSERT(!"Unknown upvalue capture type");
    }
}

} // namespace CodeGen
} // namespace Luau
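The array fast paths above all funnel the Lua index through NUM_TO_INDEX followed by SUB_INT before the CHECK_ARRAY_SIZE guard. A minimal C++ sketch of the assumed semantics of that sequence (illustrative only, not the VM's actual helpers; extreme doubles outside int range are ignored here):

// Sketch: what the NUM_TO_INDEX + SUB_INT + CHECK_ARRAY_SIZE fast path is assumed to compute.
// Any failed check corresponds to a jump to the fallback block in the IR above.
#include <optional>

std::optional<int> toArraySlot(double d, int arraySize)
{
    int i = int(d);
    if (double(i) != d)                     // NUM_TO_INDEX: index must be an exact integer
        return std::nullopt;                // -> fallback
    i -= 1;                                 // SUB_INT: Lua indices are 1-based, array slots 0-based
    if (unsigned(i) >= unsigned(arraySize)) // CHECK_ARRAY_SIZE: slot must be inside the array part
        return std::nullopt;                // -> fallback
    return i;
}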
CodeGen/src/IrTranslation.h (new file, 58 lines)

@@ -0,0 +1,58 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#pragma once

#include <stdint.h>

#include "ltm.h"

typedef uint32_t Instruction;

namespace Luau
{
namespace CodeGen
{

enum class IrCondition : uint8_t;
struct IrOp;
struct IrBuilder;

void translateInstLoadNil(IrBuilder& build, const Instruction* pc);
void translateInstLoadB(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstLoadN(IrBuilder& build, const Instruction* pc);
void translateInstLoadK(IrBuilder& build, const Instruction* pc);
void translateInstLoadKX(IrBuilder& build, const Instruction* pc);
void translateInstMove(IrBuilder& build, const Instruction* pc);
void translateInstJump(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstJumpBack(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstJumpIf(IrBuilder& build, const Instruction* pc, int pcpos, bool not_);
void translateInstJumpIfEq(IrBuilder& build, const Instruction* pc, int pcpos, bool not_);
void translateInstJumpIfCond(IrBuilder& build, const Instruction* pc, int pcpos, IrCondition cond);
void translateInstJumpX(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstJumpxEqNil(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstJumpxEqB(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstJumpxEqN(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstJumpxEqS(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstBinary(IrBuilder& build, const Instruction* pc, int pcpos, TMS tm);
void translateInstBinaryK(IrBuilder& build, const Instruction* pc, int pcpos, TMS tm);
void translateInstNot(IrBuilder& build, const Instruction* pc);
void translateInstMinus(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstLength(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstNewTable(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstDupTable(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstGetUpval(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstSetUpval(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstCloseUpvals(IrBuilder& build, const Instruction* pc);
void translateInstGetTableN(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstSetTableN(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstGetTable(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstSetTable(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstGetImport(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstGetTableKS(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstSetTableKS(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstGetGlobal(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstSetGlobal(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstConcat(IrBuilder& build, const Instruction* pc, int pcpos);
void translateInstCapture(IrBuilder& build, const Instruction* pc, int pcpos);

} // namespace CodeGen
} // namespace Luau
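These per-opcode entry points are presumably driven by a bytecode dispatch loop elsewhere in the code generator; a hypothetical sketch of such a driver (opcode names taken from Luau's Bytecode.h, not part of this diff) might look like:

// Hypothetical dispatch sketch: route one instruction to its IR translator.
#include "Luau/Bytecode.h"
#include "Luau/Common.h"
#include "IrTranslation.h"

namespace Luau
{
namespace CodeGen
{

void translateInst(IrBuilder& build, const Instruction* pc, int pcpos)
{
    switch (LUAU_INSN_OP(*pc))
    {
    case LOP_GETTABLEKS:
        translateInstGetTableKS(build, pc, pcpos);
        break;
    case LOP_SETTABLEKS:
        translateInstSetTableKS(build, pc, pcpos);
        break;
    case LOP_CONCAT:
        translateInstConcat(build, pc, pcpos);
        break;
    default:
        LUAU_ASSERT(!"Opcode not covered by this sketch");
    }
}

} // namespace CodeGen
} // namespace Luau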
@@ -98,6 +98,7 @@ inline bool isBlockTerminator(IrCmd cmd)
    case IrCmd::JUMP_CMP_NUM:
    case IrCmd::JUMP_CMP_STR:
    case IrCmd::JUMP_CMP_ANY:
    case IrCmd::LOP_NAMECALL:
    case IrCmd::LOP_RETURN:
    case IrCmd::LOP_FORNPREP:
    case IrCmd::LOP_FORNLOOP:
@@ -35,7 +35,10 @@ inline AssertHandler& assertHandler()
    return handler;
}

inline int assertCallHandler(const char* expression, const char* file, int line, const char* function)
// We want 'inline' to correctly link this function declared in the header
// But we also want to prevent compiler from inlining this function when optimization and assertions are enabled together
// Reason for that is that compilation times can increase significantly in such a configuration
LUAU_NOINLINE inline int assertCallHandler(const char* expression, const char* file, int line, const char* function)
{
    if (AssertHandler handler = assertHandler())
        return handler(expression, file, line, function);
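The 'inline + noinline' combination above keeps the header-only definition linkable while preventing the optimizer from expanding the (cold) handler body at every assertion site; a generic sketch of the same pattern, with assumed compiler attributes rather than Luau's actual LUAU_NOINLINE macro:

// Sketch: 'inline' gives the header definition ODR-safe linkage; the noinline
// attribute keeps the body from being duplicated into every call site.
#if defined(_MSC_VER)
#define SKETCH_NOINLINE __declspec(noinline)
#else
#define SKETCH_NOINLINE __attribute__((noinline))
#endif

SKETCH_NOINLINE inline int reportAssertFailure(const char* expr, const char* file, int line)
{
    // a real implementation would forward to a user-installed handler here
    return 1;
}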
@@ -4,6 +4,7 @@ if(NOT ${CMAKE_VERSION} VERSION_LESS "3.19")
    target_sources(Luau.Common PRIVATE
        Common/include/Luau/Common.h
        Common/include/Luau/Bytecode.h
        Common/include/Luau/DenseHash.h
        Common/include/Luau/ExperimentalFlags.h
    )
endif()

@@ -12,7 +13,6 @@ endif()
target_sources(Luau.Ast PRIVATE
    Ast/include/Luau/Ast.h
    Ast/include/Luau/Confusables.h
    Ast/include/Luau/DenseHash.h
    Ast/include/Luau/Lexer.h
    Ast/include/Luau/Location.h
    Ast/include/Luau/ParseOptions.h

@@ -82,7 +82,9 @@ target_sources(Luau.CodeGen PRIVATE
    CodeGen/src/EmitCommonX64.cpp
    CodeGen/src/EmitInstructionX64.cpp
    CodeGen/src/Fallbacks.cpp
    CodeGen/src/IrBuilder.cpp
    CodeGen/src/IrDump.cpp
    CodeGen/src/IrTranslation.cpp
    CodeGen/src/NativeState.cpp
    CodeGen/src/UnwindBuilderDwarf2.cpp
    CodeGen/src/UnwindBuilderWin.cpp

@@ -96,8 +98,10 @@ target_sources(Luau.CodeGen PRIVATE
    CodeGen/src/EmitInstructionX64.h
    CodeGen/src/Fallbacks.h
    CodeGen/src/FallbacksProlog.h
    CodeGen/src/IrBuilder.h
    CodeGen/src/IrDump.h
    CodeGen/src/IrData.h
    CodeGen/src/IrTranslation.h
    CodeGen/src/IrUtils.h
    CodeGen/src/NativeState.h
)
@@ -20,6 +20,7 @@ message Expr {
        ExprUnary unary = 14;
        ExprBinary binary = 15;
        ExprIfElse ifelse = 16;
        ExprInterpString interpstring = 17;
    }
}

@@ -161,6 +162,10 @@ message ExprIfElse {
    }
}

message ExprInterpString {
    repeated Expr parts = 1;
}

message LValue {
    oneof lvalue_oneof {
        ExprLocal local = 1;

@@ -282,6 +282,8 @@ struct ProtoToLuau
            print(expr.binary());
        else if (expr.has_ifelse())
            print(expr.ifelse());
        else if (expr.has_interpstring())
            print(expr.interpstring());
        else
            source += "_";
    }

@@ -538,6 +540,28 @@ struct ProtoToLuau
        }
    }

    void print(const luau::ExprInterpString& expr)
    {
        source += "`";

        for (int i = 0; i < expr.parts_size(); ++i)
        {
            if (expr.parts(i).has_string())
            {
                // String literal is added surrounded with "", but that's ok
                print(expr.parts(i));
            }
            else
            {
                source += "{";
                print(expr.parts(i));
                source += "}";
            }
        }

        source += "`";
    }

    void print(const luau::LValue& expr)
    {
        if (expr.has_local())
@ -16,6 +16,8 @@
|
|||
LUAU_FASTFLAG(LuauTraceTypesInNonstrictMode2)
|
||||
LUAU_FASTFLAG(LuauSetMetatableDoesNotTimeTravel)
|
||||
LUAU_FASTFLAG(LuauFixAutocompleteInIf)
|
||||
LUAU_FASTFLAG(LuauFixAutocompleteInWhile)
|
||||
LUAU_FASTFLAG(LuauFixAutocompleteInFor)
|
||||
|
||||
using namespace Luau;
|
||||
|
||||
|
@ -380,7 +382,7 @@ TEST_CASE_FIXTURE(ACFixture, "table_intersection")
|
|||
{
|
||||
check(R"(
|
||||
type t1 = { a1 : string, b2 : number }
|
||||
type t2 = { b2 : string, c3 : string }
|
||||
type t2 = { b2 : number, c3 : string }
|
||||
function func(abc : t1 & t2)
|
||||
abc. @1
|
||||
end
|
||||
|
@ -629,9 +631,19 @@ TEST_CASE_FIXTURE(ACFixture, "autocomplete_for_middle_keywords")
|
|||
)");
|
||||
|
||||
auto ac5 = autocomplete('1');
|
||||
if (FFlag::LuauFixAutocompleteInFor)
|
||||
{
|
||||
CHECK_EQ(ac5.entryMap.count("math"), 1);
|
||||
CHECK_EQ(ac5.entryMap.count("do"), 0);
|
||||
CHECK_EQ(ac5.entryMap.count("end"), 0);
|
||||
CHECK_EQ(ac5.context, AutocompleteContext::Expression);
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(ac5.entryMap.count("do"), 1);
|
||||
CHECK_EQ(ac5.entryMap.count("end"), 0);
|
||||
CHECK_EQ(ac5.context, AutocompleteContext::Keyword);
|
||||
}
|
||||
|
||||
check(R"(
|
||||
for x = 1, 2, 5 f@1
|
||||
|
@ -649,6 +661,31 @@ TEST_CASE_FIXTURE(ACFixture, "autocomplete_for_middle_keywords")
|
|||
auto ac7 = autocomplete('1');
|
||||
CHECK_EQ(ac7.entryMap.count("end"), 1);
|
||||
CHECK_EQ(ac7.context, AutocompleteContext::Statement);
|
||||
|
||||
if (FFlag::LuauFixAutocompleteInFor)
|
||||
{
|
||||
check(R"(local Foo = 1
|
||||
for x = @11, @22, @35
|
||||
)");
|
||||
|
||||
for (int i = 0; i < 3; ++i)
|
||||
{
|
||||
auto ac8 = autocomplete('1' + i);
|
||||
CHECK_EQ(ac8.entryMap.count("Foo"), 1);
|
||||
CHECK_EQ(ac8.entryMap.count("do"), 0);
|
||||
}
|
||||
|
||||
check(R"(local Foo = 1
|
||||
for x = @11, @22
|
||||
)");
|
||||
|
||||
for (int i = 0; i < 2; ++i)
|
||||
{
|
||||
auto ac9 = autocomplete('1' + i);
|
||||
CHECK_EQ(ac9.entryMap.count("Foo"), 1);
|
||||
CHECK_EQ(ac9.entryMap.count("do"), 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(ACFixture, "autocomplete_for_in_middle_keywords")
|
||||
|
@ -740,8 +777,18 @@ TEST_CASE_FIXTURE(ACFixture, "autocomplete_while_middle_keywords")
|
|||
)");
|
||||
|
||||
auto ac2 = autocomplete('1');
|
||||
if (FFlag::LuauFixAutocompleteInWhile)
|
||||
{
|
||||
CHECK_EQ(3, ac2.entryMap.size());
|
||||
CHECK_EQ(ac2.entryMap.count("do"), 1);
|
||||
CHECK_EQ(ac2.entryMap.count("and"), 1);
|
||||
CHECK_EQ(ac2.entryMap.count("or"), 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(1, ac2.entryMap.size());
|
||||
CHECK_EQ(ac2.entryMap.count("do"), 1);
|
||||
}
|
||||
CHECK_EQ(ac2.context, AutocompleteContext::Keyword);
|
||||
|
||||
check(R"(
|
||||
|
@ -757,9 +804,31 @@ TEST_CASE_FIXTURE(ACFixture, "autocomplete_while_middle_keywords")
|
|||
)");
|
||||
|
||||
auto ac4 = autocomplete('1');
|
||||
if (FFlag::LuauFixAutocompleteInWhile)
|
||||
{
|
||||
CHECK_EQ(3, ac4.entryMap.size());
|
||||
CHECK_EQ(ac4.entryMap.count("do"), 1);
|
||||
CHECK_EQ(ac4.entryMap.count("and"), 1);
|
||||
CHECK_EQ(ac4.entryMap.count("or"), 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(1, ac4.entryMap.size());
|
||||
CHECK_EQ(ac4.entryMap.count("do"), 1);
|
||||
}
|
||||
CHECK_EQ(ac4.context, AutocompleteContext::Keyword);
|
||||
|
||||
if (FFlag::LuauFixAutocompleteInWhile)
|
||||
{
|
||||
check(R"(
|
||||
while t@1
|
||||
)");
|
||||
|
||||
auto ac5 = autocomplete('1');
|
||||
CHECK_EQ(ac5.entryMap.count("do"), 0);
|
||||
CHECK_EQ(ac5.entryMap.count("true"), 1);
|
||||
CHECK_EQ(ac5.entryMap.count("false"), 1);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(ACFixture, "autocomplete_if_middle_keywords")
|
||||
|
@ -3399,6 +3468,8 @@ TEST_CASE_FIXTURE(ACFixture, "type_reduction_is_hooked_up_to_autocomplete")
|
|||
|
||||
TEST_CASE_FIXTURE(ACFixture, "string_contents_is_available_to_callback")
|
||||
{
|
||||
ScopedFastFlag luauAutocompleteStringContent{"LuauAutocompleteStringContent", true};
|
||||
|
||||
loadDefinition(R"(
|
||||
declare function require(path: string): any
|
||||
)");
|
||||
|
@ -3414,10 +3485,9 @@ TEST_CASE_FIXTURE(ACFixture, "string_contents_is_available_to_callback")
|
|||
)");
|
||||
|
||||
bool isCorrect = false;
|
||||
auto ac1 = autocomplete('1',
|
||||
[&isCorrect](std::string, std::optional<const ClassType*>, std::optional<std::string> contents) -> std::optional<AutocompleteEntryMap>
|
||||
{
|
||||
isCorrect = contents.has_value() && contents.value() == "testing/";
|
||||
auto ac1 = autocomplete(
|
||||
'1', [&isCorrect](std::string, std::optional<const ClassType*>, std::optional<std::string> contents) -> std::optional<AutocompleteEntryMap> {
|
||||
isCorrect = contents && *contents == "testing/";
|
||||
return std::nullopt;
|
||||
});
|
||||
|
||||
|
|
|
@ -701,8 +701,7 @@ TEST_CASE("NDebugGetUpValue")
|
|||
copts.optimizationLevel = 0;
|
||||
|
||||
runConformance(
|
||||
"ndebug_upvalues.lua",
|
||||
nullptr,
|
||||
"ndebug_upvalues.lua", nullptr,
|
||||
[](lua_State* L) {
|
||||
lua_checkstack(L, LUA_MINSTACK);
|
||||
|
||||
|
|
|
@ -606,12 +606,14 @@ void createSomeClasses(Frontend* frontend)
|
|||
|
||||
TypeId childType = arena.addType(ClassType{"Child", {}, parentType, std::nullopt, {}, nullptr, "Test"});
|
||||
|
||||
ClassType* childClass = getMutable<ClassType>(childType);
|
||||
childClass->props["virtual_method"] = {makeFunction(arena, childType, {}, {})};
|
||||
|
||||
addGlobalBinding(*frontend, "Child", {childType});
|
||||
moduleScope->exportedTypeBindings["Child"] = TypeFun{{}, childType};
|
||||
|
||||
TypeId anotherChildType = arena.addType(ClassType{"AnotherChild", {}, parentType, std::nullopt, {}, nullptr, "Test"});
|
||||
|
||||
addGlobalBinding(*frontend, "AnotherChild", {anotherChildType});
|
||||
moduleScope->exportedTypeBindings["AnotherChild"] = TypeFun{{}, anotherChildType};
|
||||
|
||||
TypeId unrelatedType = arena.addType(ClassType{"Unrelated", {}, frontend->builtinTypes->classType, std::nullopt, {}, nullptr, "Test"});
|
||||
|
||||
addGlobalBinding(*frontend, "Unrelated", {unrelatedType});
|
||||
|
|
|
@ -171,7 +171,6 @@ return bar()
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "GlobalAsLocalMultiFx")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauLintGlobalNeverReadBeforeWritten", true};
|
||||
LintResult result = lint(R"(
|
||||
function bar()
|
||||
foo = 6
|
||||
|
@ -192,7 +191,6 @@ return bar() + baz()
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "GlobalAsLocalMultiFxWithRead")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauLintGlobalNeverReadBeforeWritten", true};
|
||||
LintResult result = lint(R"(
|
||||
function bar()
|
||||
foo = 6
|
||||
|
@ -216,7 +214,6 @@ return bar() + baz() + read()
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "GlobalAsLocalWithConditional")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauLintGlobalNeverReadBeforeWritten", true};
|
||||
LintResult result = lint(R"(
|
||||
function bar()
|
||||
if true then foo = 6 end
|
||||
|
@ -236,7 +233,6 @@ return bar() + baz()
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "GlobalAsLocal3WithConditionalRead")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauLintGlobalNeverReadBeforeWritten", true};
|
||||
LintResult result = lint(R"(
|
||||
function bar()
|
||||
foo = 6
|
||||
|
@ -260,7 +256,6 @@ return bar() + baz() + read()
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "GlobalAsLocalInnerRead")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauLintGlobalNeverReadBeforeWritten", true};
|
||||
LintResult result = lint(R"(
|
||||
function foo()
|
||||
local f = function() return bar end
|
||||
|
|
|
@ -174,11 +174,6 @@ TEST_CASE_FIXTURE(IsSubtypeFixture, "table_with_any_prop")
|
|||
|
||||
TEST_CASE_FIXTURE(IsSubtypeFixture, "intersection")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
check(R"(
|
||||
local a: number & string
|
||||
local b: number
|
||||
|
|
|
@ -9,6 +9,8 @@
|
|||
|
||||
using namespace Luau;
|
||||
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution);
|
||||
|
||||
struct ToDotClassFixture : Fixture
|
||||
{
|
||||
ToDotClassFixture()
|
||||
|
@ -109,6 +111,26 @@ local function f(a, ...: string) return a end
|
|||
ToDotOptions opts;
|
||||
opts.showPointers = false;
|
||||
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
{
|
||||
CHECK_EQ(R"(digraph graphname {
|
||||
n1 [label="FunctionType 1"];
|
||||
n1 -> n2 [label="arg"];
|
||||
n2 [label="TypePack 2"];
|
||||
n2 -> n3;
|
||||
n3 [label="GenericType 3"];
|
||||
n2 -> n4 [label="tail"];
|
||||
n4 [label="VariadicTypePack 4"];
|
||||
n4 -> n5;
|
||||
n5 [label="string"];
|
||||
n1 -> n6 [label="ret"];
|
||||
n6 [label="TypePack 6"];
|
||||
n6 -> n3;
|
||||
})",
|
||||
toDot(requireType("f"), opts));
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(R"(digraph graphname {
|
||||
n1 [label="FunctionType 1"];
|
||||
n1 -> n2 [label="arg"];
|
||||
|
@ -126,6 +148,7 @@ n7 [label="TypePack 7"];
|
|||
n7 -> n3;
|
||||
})",
|
||||
toDot(requireType("f"), opts));
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "union")
|
||||
|
@ -176,6 +199,34 @@ local a: A<number, ...string>
|
|||
|
||||
ToDotOptions opts;
|
||||
opts.showPointers = false;
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
{
|
||||
CHECK_EQ(R"(digraph graphname {
|
||||
n1 [label="TableType A"];
|
||||
n1 -> n2 [label="x"];
|
||||
n2 [label="number"];
|
||||
n1 -> n3 [label="y"];
|
||||
n3 [label="FunctionType 3"];
|
||||
n3 -> n4 [label="arg"];
|
||||
n4 [label="TypePack 4"];
|
||||
n4 -> n5 [label="tail"];
|
||||
n5 [label="VariadicTypePack 5"];
|
||||
n5 -> n6;
|
||||
n6 [label="string"];
|
||||
n3 -> n7 [label="ret"];
|
||||
n7 [label="TypePack 7"];
|
||||
n1 -> n8 [label="[index]"];
|
||||
n8 [label="string"];
|
||||
n1 -> n9 [label="[value]"];
|
||||
n9 [label="any"];
|
||||
n1 -> n10 [label="typeParam"];
|
||||
n10 [label="number"];
|
||||
n1 -> n5 [label="typePackParam"];
|
||||
})",
|
||||
toDot(requireType("a"), opts));
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(R"(digraph graphname {
|
||||
n1 [label="TableType A"];
|
||||
n1 -> n2 [label="x"];
|
||||
|
@ -197,6 +248,7 @@ n9 [label="number"];
|
|||
n1 -> n4 [label="typePackParam"];
|
||||
})",
|
||||
toDot(requireType("a"), opts));
|
||||
}
|
||||
|
||||
// Extra coverage with pointers (unstable values)
|
||||
(void)toDot(requireType("a"));
|
||||
|
@ -357,6 +409,22 @@ b = a
|
|||
|
||||
ToDotOptions opts;
|
||||
opts.showPointers = false;
|
||||
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
{
|
||||
CHECK_EQ(R"(digraph graphname {
|
||||
n1 [label="BoundType 1"];
|
||||
n1 -> n2;
|
||||
n2 [label="TableType 2"];
|
||||
n2 -> n3 [label="boundTo"];
|
||||
n3 [label="TableType 3"];
|
||||
n3 -> n4 [label="x"];
|
||||
n4 [label="number"];
|
||||
})",
|
||||
toDot(*ty, opts));
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(R"(digraph graphname {
|
||||
n1 [label="TableType 1"];
|
||||
n1 -> n2 [label="boundTo"];
|
||||
|
@ -365,6 +433,7 @@ n2 -> n3 [label="x"];
|
|||
n3 [label="number"];
|
||||
})",
|
||||
toDot(*ty, opts));
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "builtintypes")
|
||||
|
|
|
@ -814,8 +814,6 @@ TEST_CASE_FIXTURE(Fixture, "toStringNamedFunction_hide_self_param")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "tostring_unsee_ttv_if_array")
|
||||
{
|
||||
ScopedFastFlag sff("LuauUnseeArrayTtv", true);
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x: {string}
|
||||
-- This code is constructed very specifically to use the same (by pointer
|
||||
|
|
|
@ -8,7 +8,8 @@
|
|||
|
||||
using namespace Luau;
|
||||
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution);
|
||||
LUAU_FASTFLAG(LuauMatchReturnsOptionalString);
|
||||
|
||||
TEST_SUITE_BEGIN("BuiltinTests");
|
||||
|
||||
|
@ -174,7 +175,7 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "math_max_checks_for_numbers")
|
|||
local n = math.max(1,2,"3")
|
||||
)");
|
||||
|
||||
CHECK(!result.errors.empty());
|
||||
LUAU_REQUIRE_ERRORS(result);
|
||||
CHECK_EQ("Type 'string' could not be converted into 'number'", toString(result.errors[0]));
|
||||
}
|
||||
|
||||
|
@ -1004,7 +1005,6 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "table_freeze_is_generic")
|
|||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "set_metatable_needs_arguments")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauSetMetaTableArgsCheck", true};
|
||||
CheckResult result = check(R"(
|
||||
local a = {b=setmetatable}
|
||||
a.b()
|
||||
|
@ -1055,6 +1055,20 @@ end
|
|||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "string_match")
|
||||
{
|
||||
CheckResult result = check(R"(
|
||||
local s:string
|
||||
local p = s:match("foo")
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
CHECK_EQ(toString(requireType("p")), "string?");
|
||||
else
|
||||
CHECK_EQ(toString(requireType("p")), "string");
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types")
|
||||
{
|
||||
CheckResult result = check(R"END(
|
||||
|
@ -1063,12 +1077,21 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types")
|
|||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string?");
|
||||
CHECK_EQ(toString(requireType("b")), "number?");
|
||||
CHECK_EQ(toString(requireType("c")), "string?");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string");
|
||||
CHECK_EQ(toString(requireType("b")), "number");
|
||||
CHECK_EQ(toString(requireType("c")), "string");
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types2")
|
||||
TEST_CASE_FIXTURE(Fixture, "gmatch_capture_types2")
|
||||
{
|
||||
CheckResult result = check(R"END(
|
||||
local a, b, c = ("This is a string"):gmatch("(.()(%a+))")()
|
||||
|
@ -1076,9 +1099,18 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types2")
|
|||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string?");
|
||||
CHECK_EQ(toString(requireType("b")), "number?");
|
||||
CHECK_EQ(toString(requireType("c")), "string?");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string");
|
||||
CHECK_EQ(toString(requireType("b")), "number");
|
||||
CHECK_EQ(toString(requireType("c")), "string");
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types_default_capture")
|
||||
|
@ -1095,6 +1127,9 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types_default_capture")
|
|||
CHECK_EQ(acm->expected, 1);
|
||||
CHECK_EQ(acm->actual, 4);
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
CHECK_EQ(toString(requireType("a")), "string?");
|
||||
else
|
||||
CHECK_EQ(toString(requireType("a")), "string");
|
||||
}
|
||||
|
||||
|
@ -1112,9 +1147,18 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types_balanced_escaped_parens
|
|||
CHECK_EQ(acm->expected, 3);
|
||||
CHECK_EQ(acm->actual, 4);
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string?");
|
||||
CHECK_EQ(toString(requireType("b")), "string?");
|
||||
CHECK_EQ(toString(requireType("c")), "number?");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string");
|
||||
CHECK_EQ(toString(requireType("b")), "string");
|
||||
CHECK_EQ(toString(requireType("c")), "number");
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types_parens_in_sets_are_ignored")
|
||||
|
@ -1131,8 +1175,16 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types_parens_in_sets_are_igno
|
|||
CHECK_EQ(acm->expected, 2);
|
||||
CHECK_EQ(acm->actual, 3);
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string?");
|
||||
CHECK_EQ(toString(requireType("b")), "number?");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string");
|
||||
CHECK_EQ(toString(requireType("b")), "number");
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types_set_containing_lbracket")
|
||||
|
@ -1143,8 +1195,16 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types_set_containing_lbracket
|
|||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "number?");
|
||||
CHECK_EQ(toString(requireType("b")), "string?");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "number");
|
||||
CHECK_EQ(toString(requireType("b")), "string");
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "gmatch_capture_types_leading_end_bracket_is_part_of_set")
|
||||
|
@ -1192,9 +1252,18 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "match_capture_types")
|
|||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string?");
|
||||
CHECK_EQ(toString(requireType("b")), "number?");
|
||||
CHECK_EQ(toString(requireType("c")), "string?");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string");
|
||||
CHECK_EQ(toString(requireType("b")), "number");
|
||||
CHECK_EQ(toString(requireType("c")), "string");
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "match_capture_types2")
|
||||
|
@ -1210,9 +1279,18 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "match_capture_types2")
|
|||
CHECK_EQ(toString(tm->wantedType), "number?");
|
||||
CHECK_EQ(toString(tm->givenType), "string");
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string?");
|
||||
CHECK_EQ(toString(requireType("b")), "number?");
|
||||
CHECK_EQ(toString(requireType("c")), "string?");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string");
|
||||
CHECK_EQ(toString(requireType("b")), "number");
|
||||
CHECK_EQ(toString(requireType("c")), "string");
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "find_capture_types")
|
||||
|
@ -1223,9 +1301,18 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "find_capture_types")
|
|||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string?");
|
||||
CHECK_EQ(toString(requireType("b")), "number?");
|
||||
CHECK_EQ(toString(requireType("c")), "string?");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string");
|
||||
CHECK_EQ(toString(requireType("b")), "number");
|
||||
CHECK_EQ(toString(requireType("c")), "string");
|
||||
}
|
||||
CHECK_EQ(toString(requireType("d")), "number?");
|
||||
CHECK_EQ(toString(requireType("e")), "number?");
|
||||
}
|
||||
|
@ -1243,9 +1330,18 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "find_capture_types2")
|
|||
CHECK_EQ(toString(tm->wantedType), "number?");
|
||||
CHECK_EQ(toString(tm->givenType), "string");
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string?");
|
||||
CHECK_EQ(toString(requireType("b")), "number?");
|
||||
CHECK_EQ(toString(requireType("c")), "string?");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string");
|
||||
CHECK_EQ(toString(requireType("b")), "number");
|
||||
CHECK_EQ(toString(requireType("c")), "string");
|
||||
}
|
||||
CHECK_EQ(toString(requireType("d")), "number?");
|
||||
CHECK_EQ(toString(requireType("e")), "number?");
|
||||
}
|
||||
|
@ -1263,9 +1359,18 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "find_capture_types3")
|
|||
CHECK_EQ(toString(tm->wantedType), "boolean?");
|
||||
CHECK_EQ(toString(tm->givenType), "string");
|
||||
|
||||
if (FFlag::LuauMatchReturnsOptionalString)
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string?");
|
||||
CHECK_EQ(toString(requireType("b")), "number?");
|
||||
CHECK_EQ(toString(requireType("c")), "string?");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ(toString(requireType("a")), "string");
|
||||
CHECK_EQ(toString(requireType("b")), "number");
|
||||
CHECK_EQ(toString(requireType("c")), "string");
|
||||
}
|
||||
CHECK_EQ(toString(requireType("d")), "number?");
|
||||
CHECK_EQ(toString(requireType("e")), "number?");
|
||||
}
|
||||
|
|
|
@ -398,11 +398,6 @@ local a: ChildClass = i
|
|||
|
||||
TEST_CASE_FIXTURE(ClassFixture, "intersections_of_unions_of_classes")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : (BaseClass | Vector2) & (ChildClass | AnotherChild)
|
||||
local y : (ChildClass | AnotherChild)
|
||||
|
@ -415,11 +410,6 @@ TEST_CASE_FIXTURE(ClassFixture, "intersections_of_unions_of_classes")
|
|||
|
||||
TEST_CASE_FIXTURE(ClassFixture, "unions_of_intersections_of_classes")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : (BaseClass & ChildClass) | (BaseClass & AnotherChild) | (BaseClass & Vector2)
|
||||
local y : (ChildClass | AnotherChild)
|
||||
|
@ -482,8 +472,6 @@ caused by:
|
|||
|
||||
TEST_CASE_FIXTURE(ClassFixture, "callable_classes")
|
||||
{
|
||||
ScopedFastFlag luauCallableClasses{"LuauCallableClasses", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : CallableClass
|
||||
local y = x("testing")
|
||||
|
|
|
@ -396,8 +396,6 @@ TEST_CASE_FIXTURE(Fixture, "class_definition_string_props")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "class_definitions_reference_other_classes")
|
||||
{
|
||||
ScopedFastFlag LuauDeclareClassPrototype("LuauDeclareClassPrototype", true);
|
||||
|
||||
unfreeze(typeChecker.globalTypes);
|
||||
LoadDefinitionFileResult result = loadDefinitionFile(typeChecker, typeChecker.globalScope, R"(
|
||||
declare class Channel
|
||||
|
|
|
@ -1726,12 +1726,6 @@ foo(string.find("hello", "e"))
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "luau_subtyping_is_np_hard")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
{"LuauOverloadedFunctionSubtypingPerf", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
--!strict
|
||||
|
||||
|
@ -1834,8 +1828,6 @@ TEST_CASE_FIXTURE(Fixture, "other_things_are_not_related_to_function")
|
|||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "fuzz_must_follow_in_overload_resolution")
|
||||
{
|
||||
ScopedFastFlag luauTypeInferMissingFollows{"LuauTypeInferMissingFollows", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
for _ in function<t0>():(t0)&((()->())&(()->()))
|
||||
end do
|
||||
|
|
|
@ -463,11 +463,6 @@ TEST_CASE_FIXTURE(Fixture, "intersect_false_and_bool_and_false")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "intersect_saturate_overloaded_functions")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : ((number?) -> number?) & ((string?) -> string?)
|
||||
local y : (nil) -> nil = x -- OK
|
||||
|
@ -481,11 +476,6 @@ TEST_CASE_FIXTURE(Fixture, "intersect_saturate_overloaded_functions")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "union_saturate_overloaded_functions")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : ((number) -> number) & ((string) -> string)
|
||||
local y : ((number | string) -> (number | string)) = x -- OK
|
||||
|
@ -499,11 +489,6 @@ TEST_CASE_FIXTURE(Fixture, "union_saturate_overloaded_functions")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "intersection_of_tables")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : { p : number?, q : string? } & { p : number?, q : number?, r : number? }
|
||||
local y : { p : number?, q : nil, r : number? } = x -- OK
|
||||
|
@ -531,8 +516,6 @@ TEST_CASE_FIXTURE(Fixture, "intersection_of_tables_with_top_properties")
|
|||
TEST_CASE_FIXTURE(Fixture, "intersection_of_tables_with_never_properties")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
{"LuauUninhabitedSubAnything2", true},
|
||||
};
|
||||
|
||||
|
@ -547,11 +530,6 @@ TEST_CASE_FIXTURE(Fixture, "intersection_of_tables_with_never_properties")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "overloaded_functions_returning_intersections")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : ((number?) -> ({ p : number } & { q : number })) & ((string?) -> ({ p : number } & { r : number }))
|
||||
local y : (nil) -> { p : number, q : number, r : number} = x -- OK
|
||||
|
@ -566,11 +544,6 @@ TEST_CASE_FIXTURE(Fixture, "overloaded_functions_returning_intersections")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "overloaded_functions_mentioning_generic")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f<a>()
|
||||
local x : ((number?) -> (a | number)) & ((string?) -> (a | string))
|
||||
|
@ -586,11 +559,6 @@ TEST_CASE_FIXTURE(Fixture, "overloaded_functions_mentioning_generic")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "overloaded_functions_mentioning_generics")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f<a,b,c>()
|
||||
local x : ((a?) -> (a | b)) & ((c?) -> (b | c))
|
||||
|
@ -606,11 +574,6 @@ TEST_CASE_FIXTURE(Fixture, "overloaded_functions_mentioning_generics")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "overloaded_functions_mentioning_generic_packs")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f<a...,b...>()
|
||||
local x : ((number?, a...) -> (number?, b...)) & ((string?, a...) -> (string?, b...))
|
||||
|
@ -626,11 +589,6 @@ TEST_CASE_FIXTURE(Fixture, "overloaded_functions_mentioning_generic_packs")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "overloadeded_functions_with_unknown_result")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f<a...,b...>()
|
||||
local x : ((number) -> number) & ((nil) -> unknown)
|
||||
|
@ -646,11 +604,6 @@ TEST_CASE_FIXTURE(Fixture, "overloadeded_functions_with_unknown_result")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "overloadeded_functions_with_unknown_arguments")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f<a...,b...>()
|
||||
local x : ((number) -> number?) & ((unknown) -> string?)
|
||||
|
@ -666,11 +619,6 @@ TEST_CASE_FIXTURE(Fixture, "overloadeded_functions_with_unknown_arguments")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "overloadeded_functions_with_never_result")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f<a...,b...>()
|
||||
local x : ((number) -> number) & ((nil) -> never)
|
||||
|
@ -686,11 +634,6 @@ TEST_CASE_FIXTURE(Fixture, "overloadeded_functions_with_never_result")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "overloadeded_functions_with_never_arguments")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f<a...,b...>()
|
||||
local x : ((number) -> number?) & ((never) -> string?)
|
||||
|
@ -779,11 +722,6 @@ TEST_CASE_FIXTURE(Fixture, "overloadeded_functions_with_weird_typepacks_4")
|
|||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "intersect_metatables")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local a : string? = nil
|
||||
local b : number? = nil
|
||||
|
@ -807,11 +745,6 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "intersect_metatables")
|
|||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "intersect_metatable_subtypes")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x = setmetatable({ a = 5 }, { p = 5 });
|
||||
local y = setmetatable({ b = "hi" }, { p = 5, q = "hi" });
|
||||
|
@ -833,11 +766,6 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "intersect_metatable_subtypes")
|
|||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "intersect_metatables_with_properties")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x = setmetatable({ a = 5 }, { p = 5 });
|
||||
local y = setmetatable({ b = "hi" }, { q = "hi" });
|
||||
|
@ -856,11 +784,6 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "intersect_metatables_with_properties")
|
|||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "intersect_metatable_with_table")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x = setmetatable({ a = 5 }, { p = 5 });
|
||||
local z = setmetatable({ a = 5, b = "hi" }, { p = 5 });
|
||||
|
@ -881,11 +804,6 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "intersect_metatable_with_table")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "CLI-44817")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
type X = {x: number}
|
||||
type Y = {y: number}
|
||||
|
|
|
@ -475,8 +475,6 @@ return l0
|
|||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "fuzz_anyify_variadic_return_must_follow")
|
||||
{
|
||||
ScopedFastFlag luauTypeInferMissingFollows{"LuauTypeInferMissingFollows", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
return unpack(l0[_])
|
||||
)");
|
||||
|
|
|
@ -14,9 +14,6 @@ namespace
|
|||
struct NegationFixture : Fixture
|
||||
{
|
||||
TypeArena arena;
|
||||
ScopedFastFlag sff[1]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
};
|
||||
|
||||
NegationFixture()
|
||||
{
|
||||
|
|
|
@ -72,17 +72,6 @@ TEST_CASE_FIXTURE(Fixture, "string_function_indirect")
|
|||
CHECK_EQ(*requireType("p"), *typeChecker.stringType);
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "string_function_other")
|
||||
{
|
||||
CheckResult result = check(R"(
|
||||
local s:string
|
||||
local p = s:match("foo")
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ(toString(requireType("p")), "string");
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "CheckMethodsOfNumber")
|
||||
{
|
||||
CheckResult result = check(R"(
|
||||
|
|
|
@ -800,7 +800,9 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "type_guard_can_filter_for_intersection_of_ta
|
|||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
CHECK_EQ("{| x: number |} & {| y: number |}", toString(requireTypeAtPosition({4, 28})));
|
||||
ToStringOptions opts;
|
||||
opts.exhaustive = true;
|
||||
CHECK_EQ("{| x: number |} & {| y: number |}", toString(requireTypeAtPosition({4, 28}), opts));
|
||||
CHECK_EQ("nil", toString(requireTypeAtPosition({6, 28})));
|
||||
}
|
||||
|
||||
|
@ -1436,6 +1438,32 @@ TEST_CASE_FIXTURE(RefinementClassFixture, "type_narrow_for_all_the_userdata")
|
|||
CHECK_EQ("number | string", toString(requireTypeAtPosition({5, 28})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(RefinementClassFixture, "type_narrow_but_the_discriminant_type_isnt_a_class")
|
||||
{
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string | number | Instance | Vector3)
|
||||
if type(x) == "any" then
|
||||
local foo = x
|
||||
else
|
||||
local foo = x
|
||||
end
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
{
|
||||
CHECK_EQ("(Instance | Vector3 | number | string) & never", toString(requireTypeAtPosition({3, 28})));
|
||||
CHECK_EQ("(Instance | Vector3 | number | string) & ~never", toString(requireTypeAtPosition({5, 28})));
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_EQ("*error-type*", toString(requireTypeAtPosition({3, 28})));
|
||||
CHECK_EQ("*error-type*", toString(requireTypeAtPosition({5, 28})));
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(RefinementClassFixture, "eliminate_subclasses_of_instance")
|
||||
{
|
||||
CheckResult result = check(R"(
|
||||
|
@ -1721,8 +1749,6 @@ TEST_CASE_FIXTURE(Fixture, "else_with_no_explicit_expression_should_also_refine_
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "fuzz_filtered_refined_types_are_followed")
|
||||
{
|
||||
ScopedFastFlag luauTypeInferMissingFollows{"LuauTypeInferMissingFollows", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local _
|
||||
do
|
||||
|
|
|
@ -55,7 +55,10 @@ TEST_CASE_FIXTURE(Fixture, "augment_table")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "augment_nested_table")
|
||||
{
|
||||
CheckResult result = check("local t = { p = {} } t.p.foo = 'bar'");
|
||||
CheckResult result = check(R"(
|
||||
local t = { p = {} }
|
||||
t.p.foo = 'bar'
|
||||
)");
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
TableType* tType = getMutable<TableType>(requireType("t"));
|
||||
|
@ -70,19 +73,28 @@ TEST_CASE_FIXTURE(Fixture, "augment_nested_table")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "cannot_augment_sealed_table")
|
||||
{
|
||||
CheckResult result = check("function mkt() return {prop=999} end local t = mkt() t.foo = 'bar'");
|
||||
CheckResult result = check(R"(
|
||||
function mkt()
|
||||
return {prop=999}
|
||||
end
|
||||
|
||||
local t = mkt()
|
||||
t.foo = 'bar'
|
||||
)");
|
||||
LUAU_REQUIRE_ERROR_COUNT(1, result);
|
||||
|
||||
TypeError& err = result.errors[0];
|
||||
|
||||
CHECK(err.location == Location{Position{6, 8}, Position{6, 13}});
|
||||
|
||||
CannotExtendTable* error = get<CannotExtendTable>(err);
|
||||
REQUIRE(error != nullptr);
|
||||
REQUIRE_MESSAGE(error != nullptr, "Expected CannotExtendTable but got: " << toString(err));
|
||||
|
||||
// TODO: better, more robust comparison of type vars
|
||||
auto s = toString(error->tableType, ToStringOptions{/*exhaustive*/ true});
|
||||
CHECK_EQ(s, "{| prop: number |}");
|
||||
CHECK_EQ(error->prop, "foo");
|
||||
CHECK_EQ(error->context, CannotExtendTable::Property);
|
||||
CHECK_EQ(err.location, (Location{Position{0, 59}, Position{0, 64}}));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "dont_seal_an_unsealed_table_by_passing_it_to_a_function_that_takes_a_sealed_table")
|
||||
|
|
|
@ -1029,10 +1029,6 @@ TEST_CASE_FIXTURE(Fixture, "type_infer_recursion_limit_no_ice")
|
|||
TEST_CASE_FIXTURE(Fixture, "type_infer_recursion_limit_normalizer")
|
||||
{
|
||||
ScopedFastInt sfi("LuauTypeInferRecursionLimit", 10);
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f<a,b,c,d,e,f,g,h,i,j>()
|
||||
|
@ -1048,10 +1044,6 @@ TEST_CASE_FIXTURE(Fixture, "type_infer_recursion_limit_normalizer")
|
|||
TEST_CASE_FIXTURE(Fixture, "type_infer_cache_limit_normalizer")
|
||||
{
|
||||
ScopedFastInt sfi("LuauNormalizeCacheLimit", 10);
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : ((number) -> number) & ((string) -> string) & ((nil) -> nil) & (({}) -> {})
|
||||
|
@ -1161,8 +1153,6 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "it_is_ok_to_have_inconsistent_number_of_retu
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "fuzz_free_table_type_change_during_index_check")
|
||||
{
|
||||
ScopedFastFlag luauFollowInLvalueIndexCheck{"LuauFollowInLvalueIndexCheck", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local _ = nil
|
||||
while _["" >= _] do
|
||||
|
|
|
@ -112,11 +112,6 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "incompatible_tables_are_preserved")
|
|||
|
||||
TEST_CASE_FIXTURE(TryUnifyFixture, "uninhabited_intersection_sub_never")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f(arg : string & number) : never
|
||||
return arg
|
||||
|
@ -127,11 +122,6 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "uninhabited_intersection_sub_never")
|
|||
|
||||
TEST_CASE_FIXTURE(TryUnifyFixture, "uninhabited_intersection_sub_anything")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f(arg : string & number) : boolean
|
||||
return arg
|
||||
|
@ -143,8 +133,6 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "uninhabited_intersection_sub_anything")
|
|||
TEST_CASE_FIXTURE(TryUnifyFixture, "uninhabited_table_sub_never")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
{"LuauUninhabitedSubAnything2", true},
|
||||
};
|
||||
|
||||
|
@ -159,8 +147,6 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "uninhabited_table_sub_never")
|
|||
TEST_CASE_FIXTURE(TryUnifyFixture, "uninhabited_table_sub_anything")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
{"LuauUninhabitedSubAnything2", true},
|
||||
};
|
||||
|
||||
|
@ -363,8 +349,6 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "metatables_unify_against_shape_of_free_table
|
|||
|
||||
TEST_CASE_FIXTURE(TryUnifyFixture, "fuzz_tail_unification_issue")
|
||||
{
|
||||
ScopedFastFlag luauTxnLogTypePackIterator{"LuauTxnLogTypePackIterator", true};
|
||||
|
||||
TypePackVar variadicAny{VariadicTypePack{typeChecker.anyType}};
|
||||
TypePackVar packTmp{TypePack{{typeChecker.anyType}, &variadicAny}};
|
||||
TypePackVar packSub{TypePack{{typeChecker.anyType, typeChecker.anyType}, &packTmp}};
|
||||
|
@ -376,4 +360,18 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "fuzz_tail_unification_issue")
|
|||
state.tryUnify(&packSub, &packSuper);
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "fuzz_unify_any_should_check_log")
|
||||
{
|
||||
ScopedFastFlag luauUnifyAnyTxnLog{"LuauUnifyAnyTxnLog", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
repeat
|
||||
_._,_ = nil
|
||||
until _
|
||||
local l0:(any)&(typeof(_)),l0:(any)|(any) = _,_
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_ERRORS(result);
|
||||
}
|
||||
|
||||
TEST_SUITE_END();
|
||||
|
|
|
@ -1039,8 +1039,6 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "generalize_expectedTypes_with_proper_scope")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "fuzz_typepack_iter_follow")
|
||||
{
|
||||
ScopedFastFlag luauTxnLogTypePackIterator{"LuauTxnLogTypePackIterator", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local _
|
||||
local _ = _,_(),_(_)
|
||||
|
@ -1051,8 +1049,6 @@ local _ = _,_(),_(_)
|
|||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "fuzz_typepack_iter_follow_2")
|
||||
{
|
||||
ScopedFastFlag luauTxnLogTypePackIterator{"LuauTxnLogTypePackIterator", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function test(name, searchTerm)
|
||||
local found = string.find(name:lower(), searchTerm:lower())
|
||||
|
|
|
@ -544,11 +544,6 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "table_union_write_indirect")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "union_true_and_false")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : boolean
|
||||
local y1 : (true | false) = x -- OK
|
||||
|
@ -562,11 +557,6 @@ TEST_CASE_FIXTURE(Fixture, "union_true_and_false")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "union_of_functions")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : (number) -> number?
|
||||
local y : ((number?) -> number?) | ((number) -> number) = x -- OK
|
||||
|
@ -599,11 +589,6 @@ TEST_CASE_FIXTURE(Fixture, "union_of_generic_typepack_functions")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "union_of_functions_mentioning_generics")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f<a,b>()
|
||||
local x : (a) -> a?
|
||||
|
@ -619,11 +604,6 @@ TEST_CASE_FIXTURE(Fixture, "union_of_functions_mentioning_generics")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "union_of_functions_mentioning_generic_typepacks")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
function f<a...>()
|
||||
local x : (number, a...) -> (number?, a...)
|
||||
|
@ -639,11 +619,6 @@ TEST_CASE_FIXTURE(Fixture, "union_of_functions_mentioning_generic_typepacks")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "union_of_functions_with_mismatching_arg_arities")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : (number) -> number?
|
||||
local y : ((number?) -> number) | ((number | string) -> nil) = x -- OK
|
||||
|
@ -657,11 +632,6 @@ TEST_CASE_FIXTURE(Fixture, "union_of_functions_with_mismatching_arg_arities")
|
|||
|
||||
TEST_CASE_FIXTURE(Fixture, "union_of_functions_with_mismatching_result_arities")
|
||||
{
|
||||
ScopedFastFlag sffs[]{
|
||||
{"LuauSubtypeNormalizer", true},
|
||||
{"LuauTypeNormalization2", true},
|
||||
};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local x : () -> (number | string)
|
||||
local y : (() -> number) | (() -> string) = x -- OK
|
||||
|
@@ -675,11 +645,6 @@ TEST_CASE_FIXTURE(Fixture, "union_of_functions_with_mismatching_result_arities")

TEST_CASE_FIXTURE(Fixture, "union_of_functions_with_variadics")
{
ScopedFastFlag sffs[]{
{"LuauSubtypeNormalizer", true},
{"LuauTypeNormalization2", true},
};

CheckResult result = check(R"(
local x : (...nil) -> (...number?)
local y : ((...string?) -> (...number)) | ((...number?) -> nil) = x -- OK

@@ -693,11 +658,6 @@ TEST_CASE_FIXTURE(Fixture, "union_of_functions_with_variadics")

TEST_CASE_FIXTURE(Fixture, "union_of_functions_with_mismatching_arg_variadics")
{
ScopedFastFlag sffs[]{
{"LuauSubtypeNormalizer", true},
{"LuauTypeNormalization2", true},
};

CheckResult result = check(R"(
local x : (number) -> ()
local y : ((number?) -> ()) | ((...number) -> ()) = x -- OK

@@ -711,11 +671,6 @@ TEST_CASE_FIXTURE(Fixture, "union_of_functions_with_mismatching_arg_variadics")

TEST_CASE_FIXTURE(Fixture, "union_of_functions_with_mismatching_result_variadics")
{
ScopedFastFlag sffs[]{
{"LuauSubtypeNormalizer", true},
{"LuauTypeNormalization2", true},
};

CheckResult result = check(R"(
local x : () -> (number?, ...number)
local y : (() -> (...number)) | (() -> nil) = x -- OK
File diff suppressed because it is too large
@@ -923,6 +923,21 @@ assert((function()
return table.concat(res, ',')
end)() == "6,8,10")

-- checking for a CFG issue that was missed in IR
assert((function(b)
local res = 0

if b then
for i = 1, 100 do
res += i
end
else
res += 100000
end

return res
end)(true) == 5050)

-- typeof and type require an argument
assert(pcall(typeof) == false)
assert(pcall(type) == false)
@@ -42,7 +42,6 @@ AutocompleteTest.type_correct_suggestion_in_argument
AutocompleteTest.type_correct_suggestion_in_table
BuiltinTests.aliased_string_format
BuiltinTests.assert_removes_falsy_types
BuiltinTests.assert_removes_falsy_types2
BuiltinTests.assert_removes_falsy_types_even_from_type_pack_tail_but_only_for_the_first_type
BuiltinTests.assert_returns_false_and_string_iff_it_knows_the_first_argument_cannot_be_truthy
BuiltinTests.bad_select_should_not_crash

@@ -53,9 +52,6 @@ BuiltinTests.dont_add_definitions_to_persistent_types
BuiltinTests.find_capture_types
BuiltinTests.find_capture_types2
BuiltinTests.find_capture_types3
BuiltinTests.gmatch_capture_types_balanced_escaped_parens
BuiltinTests.gmatch_capture_types_default_capture
BuiltinTests.gmatch_capture_types_parens_in_sets_are_ignored
BuiltinTests.gmatch_definition
BuiltinTests.ipairs_iterator_should_infer_types_and_type_check
BuiltinTests.match_capture_types

@@ -80,7 +76,6 @@ BuiltinTests.table_insert_correctly_infers_type_of_array_3_args_overload
BuiltinTests.table_pack
BuiltinTests.table_pack_reduce
BuiltinTests.table_pack_variadic
BuiltinTests.tonumber_returns_optional_number_type
DefinitionTests.class_definition_overload_metamethods
DefinitionTests.class_definition_string_props
DefinitionTests.declaring_generic_functions

@@ -103,7 +98,6 @@ GenericsTests.duplicate_generic_type_packs
GenericsTests.duplicate_generic_types
GenericsTests.generic_argument_count_too_few
GenericsTests.generic_argument_count_too_many
GenericsTests.generic_factories
GenericsTests.generic_functions_should_be_memory_safe
GenericsTests.generic_table_method
GenericsTests.generic_type_pack_parentheses

@@ -140,7 +134,6 @@ NonstrictModeTests.parameters_having_type_any_are_optional
NonstrictModeTests.table_dot_insert_and_recursive_calls
NonstrictModeTests.table_props_are_any
Normalize.cyclic_table_normalizes_sensibly
Normalize.negations_of_classes
ParseErrorRecovery.generic_type_list_recovery
ParseErrorRecovery.recovery_of_parenthesized_expressions
ParserTests.parse_nesting_based_end_detection_failsafe_earlier
@@ -160,16 +153,13 @@ ProvisionalTests.specialization_binds_with_prototypes_too_early
ProvisionalTests.table_insert_with_a_singleton_argument
ProvisionalTests.typeguard_inference_incomplete
ProvisionalTests.weirditer_should_not_loop_forever
ProvisionalTests.while_body_are_also_refined
RefinementTest.apply_refinements_on_astexprindexexpr_whose_subscript_expr_is_constant_string
RefinementTest.call_an_incompatible_function_after_using_typeguard
RefinementTest.correctly_lookup_property_whose_base_was_previously_refined2
RefinementTest.discriminate_on_properties_of_disjoint_tables_where_that_property_is_true_or_false
RefinementTest.discriminate_tag
RefinementTest.eliminate_subclasses_of_instance
RefinementTest.else_with_no_explicit_expression_should_also_refine_the_tagged_union
RefinementTest.falsiness_of_TruthyPredicate_narrows_into_nil
RefinementTest.narrow_from_subclasses_of_instance_or_string_or_vector3
RefinementTest.narrow_property_of_a_bounded_variable
RefinementTest.nonoptional_type_can_narrow_to_nil_if_sense_is_true
RefinementTest.refine_a_property_not_to_be_nil_through_an_intersection_table

@@ -179,7 +169,6 @@ RefinementTest.type_guard_narrowed_into_nothingness
RefinementTest.type_narrow_for_all_the_userdata
RefinementTest.type_narrow_to_vector
RefinementTest.typeguard_cast_free_table_to_vector
RefinementTest.typeguard_cast_instance_or_vector3_to_vector
RefinementTest.typeguard_in_assert_position
RefinementTest.typeguard_narrows_for_table
RefinementTest.x_as_any_if_x_is_instance_elseif_x_is_table

@@ -192,8 +181,6 @@ TableTests.accidentally_checked_prop_in_opposite_branch
TableTests.builtin_table_names
TableTests.call_method
TableTests.call_method_with_explicit_self_argument
TableTests.cannot_augment_sealed_table
TableTests.casting_sealed_tables_with_props_into_table_with_indexer
TableTests.casting_tables_with_props_into_table_with_indexer3
TableTests.casting_tables_with_props_into_table_with_indexer4
TableTests.checked_prop_too_early

@@ -218,12 +205,10 @@ TableTests.function_calls_produces_sealed_table_given_unsealed_table
TableTests.generic_table_instantiation_potential_regression
TableTests.getmetatable_returns_pointer_to_metatable
TableTests.give_up_after_one_metatable_index_look_up
TableTests.hide_table_error_properties
TableTests.indexer_on_sealed_table_must_unify_with_free_table
TableTests.indexing_from_a_table_should_prefer_properties_when_possible
TableTests.inequality_operators_imply_exactly_matching_types
TableTests.infer_array_2
TableTests.infer_indexer_from_value_property_in_literal
TableTests.inferred_return_type_of_free_table
TableTests.inferring_crazy_table_should_also_be_quick
TableTests.instantiate_table_cloning_3
@@ -243,7 +228,6 @@ TableTests.only_ascribe_synthetic_names_at_module_scope
TableTests.oop_indexer_works
TableTests.oop_polymorphic
TableTests.open_table_unification_2
TableTests.persistent_sealed_table_is_immutable
TableTests.property_lookup_through_tabletypevar_metatable
TableTests.quantify_even_that_table_was_never_exported_at_all
TableTests.quantify_metatables_of_metatables_of_table

@@ -252,7 +236,6 @@ TableTests.reasonable_error_when_adding_a_nonexistent_property_to_an_array_like_
TableTests.result_is_always_any_if_lhs_is_any
TableTests.result_is_bool_for_equality_operators_if_lhs_is_any
TableTests.right_table_missing_key2
TableTests.scalar_is_not_a_subtype_of_a_compatible_polymorphic_shape_type
TableTests.shared_selfs
TableTests.shared_selfs_from_free_param
TableTests.shared_selfs_through_metatables

@@ -261,7 +244,6 @@ TableTests.table_function_check_use_after_free
TableTests.table_indexing_error_location
TableTests.table_insert_should_cope_with_optional_properties_in_nonstrict
TableTests.table_insert_should_cope_with_optional_properties_in_strict
TableTests.table_param_row_polymorphism_2
TableTests.table_param_row_polymorphism_3
TableTests.table_simple_call
TableTests.table_subtyping_with_extra_props_dont_report_multiple_errors

@@ -276,12 +258,10 @@ TableTests.used_colon_correctly
TableTests.used_colon_instead_of_dot
TableTests.used_dot_instead_of_colon
TableTests.used_dot_instead_of_colon_but_correctly
ToDot.bound_table
ToDot.function
ToDot.table
ToString.exhaustive_toString_of_cyclic_table
ToString.function_type_with_argument_names_and_self
ToString.function_type_with_argument_names_generic
ToString.named_metatable_toStringNamedFunction
ToString.toStringDetailed2
ToString.toStringErrorPack
ToString.toStringNamedFunction_generic_pack

@@ -297,10 +277,12 @@ TryUnifyTests.result_of_failed_typepack_unification_is_constrained
TryUnifyTests.typepack_unification_should_trim_free_tails
TryUnifyTests.variadics_should_use_reversed_properly
TypeAliases.cannot_create_cyclic_type_with_unknown_module
TypeAliases.corecursive_types_generic
TypeAliases.forward_declared_alias_is_not_clobbered_by_prior_unification_with_any
TypeAliases.forward_declared_alias_is_not_clobbered_by_prior_unification_with_any_2
TypeAliases.generic_param_remap
TypeAliases.mismatched_generic_type_param
TypeAliases.mutually_recursive_types_errors
TypeAliases.mutually_recursive_types_restriction_not_ok_1
TypeAliases.mutually_recursive_types_restriction_not_ok_2
TypeAliases.mutually_recursive_types_swapsies_not_ok
@@ -308,8 +290,6 @@ TypeAliases.recursive_types_restriction_not_ok
TypeAliases.report_shadowed_aliases
TypeAliases.stringify_optional_parameterized_alias
TypeAliases.stringify_type_alias_of_recursive_template_table_type
TypeAliases.stringify_type_alias_of_recursive_template_table_type2
TypeAliases.type_alias_fwd_declaration_is_precise
TypeAliases.type_alias_local_mutation
TypeAliases.type_alias_local_rename
TypeAliases.type_alias_of_an_imported_recursive_generic_type

@@ -337,15 +317,12 @@ TypeInferAnyError.metatable_of_any_can_be_a_table
TypeInferClasses.can_read_prop_of_base_class_using_string
TypeInferClasses.class_type_mismatch_with_name_conflict
TypeInferClasses.classes_without_overloaded_operators_cannot_be_added
TypeInferClasses.detailed_class_unification_error
TypeInferClasses.higher_order_function_arguments_are_contravariant
TypeInferClasses.index_instance_property
TypeInferClasses.optional_class_field_access_error
TypeInferClasses.table_class_unification_reports_sane_errors_for_missing_properties
TypeInferClasses.warn_when_prop_almost_matches
TypeInferClasses.we_can_report_when_someone_is_trying_to_use_a_table_rather_than_a_class
TypeInferFunctions.calling_function_with_anytypepack_doesnt_leak_free_types
TypeInferFunctions.calling_function_with_incorrect_argument_type_yields_errors_spanning_argument
TypeInferFunctions.cannot_hoist_interior_defns_into_signature
TypeInferFunctions.dont_give_other_overloads_message_if_only_one_argument_matching_overload_exists
TypeInferFunctions.dont_infer_parameter_types_for_functions_from_their_call_site

@@ -374,9 +351,7 @@ TypeInferFunctions.return_type_by_overload
TypeInferFunctions.too_few_arguments_variadic
TypeInferFunctions.too_few_arguments_variadic_generic
TypeInferFunctions.too_few_arguments_variadic_generic2
TypeInferFunctions.too_many_arguments
TypeInferFunctions.too_many_arguments_error_location
TypeInferFunctions.too_many_return_values
TypeInferFunctions.too_many_return_values_in_parentheses
TypeInferFunctions.too_many_return_values_no_function
TypeInferFunctions.vararg_function_is_quantified

@@ -399,8 +374,6 @@ TypeInferModules.module_type_conflict_instantiated
TypeInferModules.require_a_variadic_function
TypeInferModules.type_error_of_unknown_qualified_type
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_another_overload_works
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_it_wont_help_2
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_not_defined_with_colon
TypeInferOOP.inferring_hundreds_of_self_calls_should_not_suffocate_memory
TypeInferOOP.method_depends_on_table
TypeInferOOP.methods_are_topologically_sorted

@@ -461,10 +434,7 @@ TypePackTests.type_pack_type_parameters
TypePackTests.unify_variadic_tails_in_arguments
TypePackTests.unify_variadic_tails_in_arguments_free
TypePackTests.variadic_packs
TypeReductionTests.discriminable_unions
TypeReductionTests.intersections_with_negations
TypeReductionTests.negations
TypeReductionTests.unions_with_negations
TypeSingletons.error_detailed_tagged_union_mismatch_bool
TypeSingletons.error_detailed_tagged_union_mismatch_string
TypeSingletons.function_call_with_singletons