Merge branch 'upstream' into merge

Aaron Weiss 2023-09-22 11:12:50 -07:00
commit 3882ab1488
50 changed files with 1332 additions and 671 deletions

View file

@@ -203,8 +203,9 @@ struct ConstraintSolver
     * the result.
     * @param subType the sub-type to unify.
     * @param superType the super-type to unify.
+     * @returns optionally a unification too complex error if unification failed
     */
-    ErrorVec unify(NotNull<Scope> scope, Location location, TypeId subType, TypeId superType);
+    std::optional<TypeError> unify(NotNull<Scope> scope, Location location, TypeId subType, TypeId superType);

    /**
     * Creates a new Unifier and performs a single unification operation. Commits

@@ -233,6 +234,15 @@ struct ConstraintSolver
    void reportError(TypeErrorData&& data, const Location& location);
    void reportError(TypeError e);

+    /**
+     * Checks the existing set of constraints to see if there exist any that contain
+     * the provided free type, indicating that it is not yet ready to be replaced by
+     * one of its bounds.
+     * @param ty the free type that to check for related constraints
+     * @returns whether or not it is unsafe to replace the free type by one of its bounds
+     */
+    bool hasUnresolvedConstraints(TypeId ty);
+
private:
    /** Helper used by tryDispatch(SubtypeConstraint) and

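The unify helper above now reports at most one failure directly to the caller instead of accumulating an ErrorVec. A minimal sketch of the intended calling pattern, mirroring how the iterable-function constraint dispatcher uses it later in this change (subTy, superTy, and proceedWithDispatch are placeholders, not part of the diff):

    std::optional<TypeError> error = unify(constraint->scope, constraint->location, subTy, superTy);
    if (!error)
        proceedWithDispatch();   // hypothetical success path, for illustration only
    else
        reportError(*error);     // the single error already carries its own Location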
View file

@@ -124,6 +124,8 @@ private:
    SubtypingResult isCovariantWith(const PrimitiveType* subPrim, const TableType* superTable);
    SubtypingResult isCovariantWith(const SingletonType* subSingleton, const TableType* superTable);
+    SubtypingResult isCovariantWith(const TableIndexer& subIndexer, const TableIndexer& superIndexer);
+
    SubtypingResult isCovariantWith(const NormalizedType* subNorm, const NormalizedType* superNorm);
    SubtypingResult isCovariantWith(const NormalizedClassType& subClass, const NormalizedClassType& superClass);
    SubtypingResult isCovariantWith(const NormalizedClassType& subClass, const TypeIds& superTables);

View file

@@ -141,8 +141,6 @@ struct BlockedType
{
    BlockedType();

    int index;
-
-    static int DEPRECATED_nextIndex;
};

struct PrimitiveType

View file

@@ -54,7 +54,7 @@ struct TypeFamily
    /// The reducer function for the type family.
    std::function<TypeFamilyReductionResult<TypeId>(std::vector<TypeId>, std::vector<TypePackId>, NotNull<TypeArena>, NotNull<BuiltinTypes>,
-        NotNull<TxnLog>, NotNull<Scope>, NotNull<Normalizer>)>
+        NotNull<TxnLog>, NotNull<Scope>, NotNull<Normalizer>, ConstraintSolver*)>
        reducer;
};

@@ -68,7 +68,7 @@ struct TypePackFamily
    /// The reducer function for the type pack family.
    std::function<TypeFamilyReductionResult<TypePackId>(std::vector<TypeId>, std::vector<TypePackId>, NotNull<TypeArena>, NotNull<BuiltinTypes>,
-        NotNull<TxnLog>, NotNull<Scope>, NotNull<Normalizer>)>
+        NotNull<TxnLog>, NotNull<Scope>, NotNull<Normalizer>, ConstraintSolver*)>
        reducer;
};
@@ -82,36 +82,68 @@ struct FamilyGraphReductionResult
};

/**
 * Attempt to reduce all instances of any type or type pack family in the type
 * graph provided.
 *
 * @param entrypoint the entry point to the type graph.
 * @param location the location the reduction is occurring at; used to populate
 * type errors.
 * @param arena an arena to allocate types into.
 * @param builtins the built-in types.
+ * @param normalizer the normalizer to use when normalizing types
 * @param log a TxnLog to use. If one is provided, substitution will take place
 * against the TxnLog, otherwise substitutions will directly mutate the type
 * graph. Do not provide the empty TxnLog, as a result.
 */
FamilyGraphReductionResult reduceFamilies(TypeId entrypoint, Location location, NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins,
    NotNull<Scope> scope, NotNull<Normalizer> normalizer, TxnLog* log = nullptr, bool force = false);

/**
 * Attempt to reduce all instances of any type or type pack family in the type
 * graph provided.
 *
 * @param entrypoint the entry point to the type graph.
 * @param location the location the reduction is occurring at; used to populate
 * type errors.
 * @param arena an arena to allocate types into.
 * @param builtins the built-in types.
+ * @param normalizer the normalizer to use when normalizing types
 * @param log a TxnLog to use. If one is provided, substitution will take place
 * against the TxnLog, otherwise substitutions will directly mutate the type
 * graph. Do not provide the empty TxnLog, as a result.
 */
FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location location, NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins,
    NotNull<Scope> scope, NotNull<Normalizer> normalizer, TxnLog* log = nullptr, bool force = false);

+/**
+ * Attempt to reduce all instances of any type or type pack family in the type
+ * graph provided.
+ *
+ * @param solver the constraint solver this reduction is being performed in.
+ * @param entrypoint the entry point to the type graph.
+ * @param location the location the reduction is occurring at; used to populate
+ * type errors.
+ * @param log a TxnLog to use. If one is provided, substitution will take place
+ * against the TxnLog, otherwise substitutions will directly mutate the type
+ * graph. Do not provide the empty TxnLog, as a result.
+ */
+FamilyGraphReductionResult reduceFamilies(
+    NotNull<ConstraintSolver> solver, TypeId entrypoint, Location location, NotNull<Scope> scope, TxnLog* log = nullptr, bool force = false);
+
+/**
+ * Attempt to reduce all instances of any type or type pack family in the type
+ * graph provided.
+ *
+ * @param solver the constraint solver this reduction is being performed in.
+ * @param entrypoint the entry point to the type graph.
+ * @param location the location the reduction is occurring at; used to populate
+ * type errors.
+ * @param log a TxnLog to use. If one is provided, substitution will take place
+ * against the TxnLog, otherwise substitutions will directly mutate the type
+ * graph. Do not provide the empty TxnLog, as a result.
+ */
+FamilyGraphReductionResult reduceFamilies(
+    NotNull<ConstraintSolver> solver, TypePackId entrypoint, Location location, NotNull<Scope> scope, TxnLog* log = nullptr, bool force = false);

struct BuiltinTypeFamilies
{

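Both reducer signatures gain a trailing ConstraintSolver* so a family can ask the solver whether its operands are still constrained. A sketch of a reducer written against the new signature; the family logic here is invented for illustration and is not part of this commit:

    TypeFamilyReductionResult<TypeId> exampleFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams,
        NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins, NotNull<TxnLog> log, NotNull<Scope> scope,
        NotNull<Normalizer> normalizer, ConstraintSolver* solver)
    {
        TypeId subject = log->follow(typeParams.at(0));

        // The solver pointer may be null (reduction outside the constraint solver), so guard before using it.
        if (solver && solver->hasUnresolvedConstraints(subject))
            return {std::nullopt, false, {subject}, {}}; // not reducible yet; block on the subject

        return {subject, false, {}, {}}; // reduce to the (followed) subject itself
    }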
View file

@@ -4,6 +4,9 @@
#include "Luau/DenseHash.h"
#include "Luau/NotNull.h"
+#include "Type.h"
+#include "TypeCheckLimits.h"
+#include "TypeChecker2.h"

#include <optional>
#include <vector>

@@ -26,16 +29,44 @@ enum class OccursCheckResult
    Fail
};

+struct TypePairHash
+{
+    size_t hashOne(Luau::TypeId key) const
+    {
+        return (uintptr_t(key) >> 4) ^ (uintptr_t(key) >> 9);
+    }
+
+    size_t hashOne(Luau::TypePackId key) const
+    {
+        return (uintptr_t(key) >> 4) ^ (uintptr_t(key) >> 9);
+    }
+
+    size_t operator()(const std::pair<Luau::TypeId, Luau::TypeId>& x) const
+    {
+        return hashOne(x.first) ^ (hashOne(x.second) << 1);
+    }
+
+    size_t operator()(const std::pair<Luau::TypePackId, Luau::TypePackId>& x) const
+    {
+        return hashOne(x.first) ^ (hashOne(x.second) << 1);
+    }
+};
+
struct Unifier2
{
    NotNull<TypeArena> arena;
    NotNull<BuiltinTypes> builtinTypes;
+    NotNull<Scope> scope;
    NotNull<InternalErrorReporter> ice;
+    TypeCheckLimits limits;
+
+    DenseHashSet<std::pair<TypeId, TypeId>, TypePairHash> seenTypePairings{{nullptr, nullptr}};
+    DenseHashSet<std::pair<TypePackId, TypePackId>, TypePairHash> seenTypePackPairings{{nullptr, nullptr}};

    int recursionCount = 0;
    int recursionLimit = 0;

-    Unifier2(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<InternalErrorReporter> ice);
+    Unifier2(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope, NotNull<InternalErrorReporter> ice);

    /** Attempt to commit the subtype relation subTy <: superTy to the type
     * graph.

@@ -50,11 +81,18 @@ struct Unifier2
     * free TypePack to another and encounter an occurs check violation.
     */
    bool unify(TypeId subTy, TypeId superTy);
+    bool unify(TypeId subTy, const FunctionType* superFn);
+    bool unify(const UnionType* subUnion, TypeId superTy);
+    bool unify(TypeId subTy, const UnionType* superUnion);
+    bool unify(const IntersectionType* subIntersection, TypeId superTy);
+    bool unify(TypeId subTy, const IntersectionType* superIntersection);
+    bool unify(const TableType* subTable, const TableType* superTable);
+    bool unify(const MetatableType* subMetatable, const MetatableType* superMetatable);

    // TODO think about this one carefully. We don't do unions or intersections of type packs
    bool unify(TypePackId subTp, TypePackId superTp);

-    std::optional<TypeId> generalize(NotNull<Scope> scope, TypeId ty);
+    std::optional<TypeId> generalize(TypeId ty);

private:
    /**

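Unifier2 now records every (sub, super) pairing it has visited in the two DenseHashSet members keyed by TypePairHash; the corresponding check added to Unifier2::unify later in this diff is what lets unification terminate on cyclic type graphs. Roughly, the guard works like this (a sketch of the pattern, not additional code from the commit):

    // At the top of a unification step:
    if (seenTypePairings.contains({subTy, superTy}))
        return true;                        // this pairing is already in progress; break the cycle
    seenTypePairings.insert({subTy, superTy});
    // ... structural unification of subTy against superTy continues from here ...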
View file

@@ -226,11 +226,16 @@ struct GenericTypeVisitor
        {
            if (visit(ty, *ftv))
            {
-                LUAU_ASSERT(ftv->lowerBound);
-                traverse(ftv->lowerBound);
-
-                LUAU_ASSERT(ftv->upperBound);
-                traverse(ftv->upperBound);
+                // TODO: Replace these if statements with assert()s when we
+                // delete FFlag::DebugLuauDeferredConstraintResolution.
+                //
+                // When the old solver is used, these pointers are always
+                // unused. When the new solver is used, they are never null.
+                if (ftv->lowerBound)
+                    traverse(ftv->lowerBound);
+
+                if (ftv->upperBound)
+                    traverse(ftv->upperBound);
            }
        }
        else

View file

@@ -12,7 +12,6 @@
#include <algorithm>

LUAU_FASTFLAG(DebugLuauReadWriteProperties)
-LUAU_FASTFLAGVARIABLE(FixFindBindingAtFunctionName, false);

namespace Luau
{

@@ -151,19 +150,12 @@ struct FindNode : public AstVisitor
    bool visit(AstStatFunction* node) override
    {
-        if (FFlag::FixFindBindingAtFunctionName)
-        {
-            visit(static_cast<AstNode*>(node));
-            if (node->name->location.contains(pos))
-                node->name->visit(this);
-            else if (node->func->location.contains(pos))
-                node->func->visit(this);
-            return false;
-        }
-        else
-        {
-            return AstVisitor::visit(node);
-        }
+        visit(static_cast<AstNode*>(node));
+        if (node->name->location.contains(pos))
+            node->name->visit(this);
+        else if (node->func->location.contains(pos))
+            node->func->visit(this);
+        return false;
    }

    bool visit(AstStatBlock* block) override

@@ -208,19 +200,12 @@ struct FindFullAncestry final : public AstVisitor
    bool visit(AstStatFunction* node) override
    {
-        if (FFlag::FixFindBindingAtFunctionName)
-        {
-            visit(static_cast<AstNode*>(node));
-            if (node->name->location.contains(pos))
-                node->name->visit(this);
-            else if (node->func->location.contains(pos))
-                node->func->visit(this);
-            return false;
-        }
-        else
-        {
-            return AstVisitor::visit(node);
-        }
+        visit(static_cast<AstNode*>(node));
+        if (node->name->location.contains(pos))
+            node->name->visit(this);
+        else if (node->func->location.contains(pos))
+            node->func->visit(this);
+        return false;
    }

    bool visit(AstNode* node) override

View file

@@ -473,7 +473,7 @@ void ConstraintSolver::finalizeModule()
        rootScope->returnType = anyifyModuleReturnTypePackGenerics(*returnType);
    }

-    Unifier2 u2{NotNull{arena}, builtinTypes, NotNull{&iceReporter}};
+    Unifier2 u2{NotNull{arena}, builtinTypes, rootScope, NotNull{&iceReporter}};

    std::deque<TypeAndLocation> queue;
    for (auto& [name, binding] : rootScope->bindings)

@@ -495,7 +495,7 @@ void ConstraintSolver::finalizeModule()
        FreeTypeSearcher fts{&queue, binding.location};
        fts.traverse(ty);

-        auto result = u2.generalize(rootScope, ty);
+        auto result = u2.generalize(ty);
        if (!result)
            reportError(CodeTooComplex{}, binding.location);
    }

@@ -586,9 +586,9 @@ bool ConstraintSolver::tryDispatch(const GeneralizationConstraint& c, NotNull<co
        std::optional<QuantifierResult> generalized;

-        Unifier2 u2{NotNull{arena}, builtinTypes, NotNull{&iceReporter}};
+        Unifier2 u2{NotNull{arena}, builtinTypes, constraint->scope, NotNull{&iceReporter}};

-        std::optional<TypeId> generalizedTy = u2.generalize(constraint->scope, c.sourceType);
+        std::optional<TypeId> generalizedTy = u2.generalize(c.sourceType);
        if (generalizedTy)
            generalized = QuantifierResult{*generalizedTy}; // FIXME insertedGenerics and insertedGenericPacks
        else

@@ -1310,7 +1310,7 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
    TypePackId argsPack = follow(c.argsPack);
    TypePackId result = follow(c.result);

-    if (isBlocked(fn))
+    if (isBlocked(fn) || hasUnresolvedConstraints(fn))
    {
        return block(c.fn, constraint);
    }
@@ -1381,98 +1381,13 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
    }

    TypeId inferredTy = arena->addType(FunctionType{TypeLevel{}, constraint->scope.get(), argsPack, c.result});

-    const NormalizedType* normFn = normalizer->normalize(fn);
-    if (!normFn)
-    {
-        reportError(UnificationTooComplex{}, constraint->location);
-        return true;
-    }
-
-    // TODO: It would be nice to not need to convert the normalized type back to
-    // an intersection and flatten it.
-    TypeId normFnTy = normalizer->typeFromNormal(*normFn);
-    std::vector<TypeId> overloads = flattenIntersection(normFnTy);
-
-    std::vector<TypeId> arityMatchingOverloads;
-    std::optional<TxnLog> bestOverloadLog;
-
-    for (TypeId overload : overloads)
-    {
-        overload = follow(overload);
-
-        std::optional<TypeId> instantiated = instantiate(builtinTypes, arena, NotNull{&limits}, constraint->scope, overload);
-
-        if (!instantiated.has_value())
-        {
-            reportError(UnificationTooComplex{}, constraint->location);
-            return true;
-        }
-
-        Unifier u{normalizer, constraint->scope, Location{}, Covariant};
-        u.enableNewSolver();
-
-        u.tryUnify(*instantiated, inferredTy, /* isFunctionCall */ true);
-
-        if (!u.blockedTypes.empty() || !u.blockedTypePacks.empty())
-        {
-            for (TypeId bt : u.blockedTypes)
-                block(bt, constraint);
-            for (TypePackId btp : u.blockedTypePacks)
-                block(btp, constraint);
-            return false;
-        }
-
-        if (const auto& e = hasUnificationTooComplex(u.errors))
-            reportError(*e);
-
-        const auto& e = hasCountMismatch(u.errors);
-        bool areArgumentsCompatible = (!e || get<CountMismatch>(*e)->context != CountMismatch::Context::Arg) && get<FunctionType>(*instantiated);
-        if (areArgumentsCompatible)
-            arityMatchingOverloads.push_back(*instantiated);
-
-        if (u.errors.empty())
-        {
-            if (c.callSite)
-                (*c.astOverloadResolvedTypes)[c.callSite] = *instantiated;
-
-            // This overload has no errors, so override the bestOverloadLog and use this one.
-            bestOverloadLog = std::move(u.log);
-            break;
-        }
-        else if (areArgumentsCompatible && !bestOverloadLog)
-        {
-            // This overload is erroneous. Replace its inferences with `any` iff there isn't already a TxnLog.
-            bestOverloadLog = std::move(u.log);
-        }
-    }
-
-    if (arityMatchingOverloads.size() == 1 && c.callSite)
-    {
-        // In the name of better error messages in the type checker, we provide
-        // it with an instantiated function signature that matched arity, but
-        // not the requisite subtyping requirements. This makes errors better in
-        // cases where only one overload fit from an arity perspective.
-        (*c.astOverloadResolvedTypes)[c.callSite] = arityMatchingOverloads.at(0);
-    }
-
-    // We didn't find any overload that were a viable candidate, so replace the inferences with `any`.
-    if (!bestOverloadLog)
-    {
-        Unifier u{normalizer, constraint->scope, Location{}, Covariant};
-        u.enableNewSolver();
-
-        u.tryUnify(inferredTy, builtinTypes->anyType);
-        u.tryUnify(fn, builtinTypes->anyType);
-
-        bestOverloadLog = std::move(u.log);
-    }
-
-    const auto [changedTypes, changedPacks] = bestOverloadLog->getChanges();
-    bestOverloadLog->commit();
-
-    unblock(changedTypes, constraint->location);
-    unblock(changedPacks, constraint->location);
+    Unifier2 u2{NotNull{arena}, builtinTypes, constraint->scope, NotNull{&iceReporter}};
+
+    const bool occursCheckPassed = u2.unify(fn, inferredTy);
+
+    if (occursCheckPassed && c.callSite)
+        (*c.astOverloadResolvedTypes)[c.callSite] = inferredTy;

    unblock(c.result, constraint->location);

    InstantiationQueuer queuer{constraint->scope, constraint->location, this};
@@ -1994,7 +1909,7 @@ bool ConstraintSolver::tryDispatch(const ReduceConstraint& c, NotNull<const Cons
{
    TypeId ty = follow(c.ty);

    FamilyGraphReductionResult result =
-        reduceFamilies(ty, constraint->location, NotNull{arena}, builtinTypes, constraint->scope, normalizer, nullptr, force);
+        reduceFamilies(NotNull{this}, ty, constraint->location, constraint->scope, nullptr, force);

    for (TypeId r : result.reducedTypes)
        unblock(r, constraint->location);

@@ -2018,7 +1933,7 @@ bool ConstraintSolver::tryDispatch(const ReducePackConstraint& c, NotNull<const
{
    TypePackId tp = follow(c.tp);

    FamilyGraphReductionResult result =
-        reduceFamilies(tp, constraint->location, NotNull{arena}, builtinTypes, constraint->scope, normalizer, nullptr, force);
+        reduceFamilies(NotNull{this}, tp, constraint->location, constraint->scope, nullptr, force);

    for (TypeId r : result.reducedTypes)
        unblock(r, constraint->location);

@@ -2241,13 +2156,13 @@ bool ConstraintSolver::tryDispatchIterableFunction(
    const TypePackId nextRetPack = arena->addTypePack(TypePack{{retIndex}, valueTailTy});

    const TypeId expectedNextTy = arena->addType(FunctionType{TypeLevel{}, constraint->scope, nextArgPack, nextRetPack});
-    ErrorVec errors = unify(constraint->scope, constraint->location, nextTy, expectedNextTy);
+    std::optional<TypeError> error = unify(constraint->scope, constraint->location, nextTy, expectedNextTy);

    // if there are no errors from unifying the two, we can pass forward the expected type as our selected resolution.
-    if (errors.empty())
-    {
+    if (!error)
        (*c.astForInNextTypes)[c.nextAstFragment] = expectedNextTy;
-    }
+    else
+        reportError(*error);

    auto it = begin(nextRetPack);
    std::vector<TypeId> modifiedNextRetHead;
@@ -2440,7 +2355,7 @@ std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTa
template<typename TID>
bool ConstraintSolver::tryUnify(NotNull<const Constraint> constraint, TID subTy, TID superTy)
{
-    Unifier2 u2{NotNull{arena}, builtinTypes, NotNull{&iceReporter}};
+    Unifier2 u2{NotNull{arena}, builtinTypes, constraint->scope, NotNull{&iceReporter}};

    bool success = u2.unify(subTy, superTy);

@@ -2670,14 +2585,14 @@ bool ConstraintSolver::isBlocked(NotNull<const Constraint> constraint)
    return blockedIt != blockedConstraints.end() && blockedIt->second > 0;
}

-ErrorVec ConstraintSolver::unify(NotNull<Scope> scope, Location location, TypeId subType, TypeId superType)
+std::optional<TypeError> ConstraintSolver::unify(NotNull<Scope> scope, Location location, TypeId subType, TypeId superType)
{
-    Unifier2 u2{NotNull{arena}, builtinTypes, NotNull{&iceReporter}};
+    Unifier2 u2{NotNull{arena}, builtinTypes, scope, NotNull{&iceReporter}};

    const bool ok = u2.unify(subType, superType);
    if (!ok)
-        reportError(UnificationTooComplex{}, location);
+        return {{location, UnificationTooComplex{}}};

    unblock(subType, Location{});
    unblock(superType, Location{});

@@ -2687,7 +2602,7 @@ ErrorVec ConstraintSolver::unify(NotNull<Scope> scope, Location location, TypeId
ErrorVec ConstraintSolver::unify(NotNull<Scope> scope, Location location, TypePackId subPack, TypePackId superPack)
{
-    Unifier2 u{arena, builtinTypes, NotNull{&iceReporter}};
+    Unifier2 u{arena, builtinTypes, scope, NotNull{&iceReporter}};

    u.unify(subPack, superPack);
@@ -2762,6 +2677,50 @@ void ConstraintSolver::reportError(TypeError e)
    errors.back().moduleName = currentModuleName;
}

+struct ContainsType : TypeOnceVisitor
+{
+    TypeId needle;
+    bool found = false;
+
+    explicit ContainsType(TypeId needle)
+        : needle(needle)
+    {
+    }
+
+    bool visit(TypeId) override
+    {
+        return !found; // traverse into the type iff we have yet to find the needle
+    }
+
+    bool visit(TypeId ty, const FreeType&) override
+    {
+        found |= ty == needle;
+        return false;
+    }
+};
+
+bool ConstraintSolver::hasUnresolvedConstraints(TypeId ty)
+{
+    if (!get<FreeType>(ty) || unsolvedConstraints.empty())
+        return false; // if it's not free, it never has any unresolved constraints, maybe?
+
+    ContainsType containsTy{ty};
+
+    for (auto constraint : unsolvedConstraints)
+    {
+        if (auto sc = get<SubtypeConstraint>(*constraint))
+        {
+            containsTy.traverse(sc->subType);
+            containsTy.traverse(sc->superType);
+
+            if (containsTy.found)
+                return true;
+        }
+    }
+
+    return containsTy.found;
+}
+
TypeId ConstraintSolver::errorRecoveryType() const
{
    return builtinTypes->errorRecoveryType();

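hasUnresolvedConstraints drives the ContainsType visitor over the sub- and super-types of every pending SubtypeConstraint, and the visitor stops descending as soon as the needle is found. The same visitor can be driven directly; a small usage sketch, where needle and sc are placeholders rather than names from this diff:

    ContainsType search{needle};           // needle: the free TypeId we are asking about
    search.traverse(sc->subType);          // sc: some pending SubtypeConstraint
    search.traverse(sc->superType);
    bool mustStayBlocked = search.found;   // true => the free type still appears in an unsolved constraint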
View file

@@ -17,7 +17,6 @@ LUAU_FASTFLAGVARIABLE(DebugLuauCheckNormalizeInvariant, false)
// This could theoretically be 2000 on amd64, but x86 requires this.
LUAU_FASTINTVARIABLE(LuauNormalizeIterationLimit, 1200);
LUAU_FASTINTVARIABLE(LuauNormalizeCacheLimit, 100000);
-LUAU_FASTFLAGVARIABLE(LuauNormalizeBlockedTypes, false);
LUAU_FASTFLAGVARIABLE(LuauNormalizeCyclicUnions, false);
LUAU_FASTFLAG(LuauTransitiveSubtyping)
LUAU_FASTFLAG(DebugLuauReadWriteProperties)

@@ -642,7 +641,7 @@ static bool areNormalizedClasses(const NormalizedClassType& tys)
static bool isPlainTyvar(TypeId ty)
{
-    return (get<FreeType>(ty) || get<GenericType>(ty) || (FFlag::LuauNormalizeBlockedTypes && get<BlockedType>(ty)) ||
-            get<PendingExpansionType>(ty) || get<TypeFamilyInstanceType>(ty));
+    return (get<FreeType>(ty) || get<GenericType>(ty) || get<BlockedType>(ty) ||
+            get<PendingExpansionType>(ty) || get<TypeFamilyInstanceType>(ty));
}

@@ -1515,7 +1514,7 @@ bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, std::unor
    }
    else if (FFlag::LuauTransitiveSubtyping && get<UnknownType>(here.tops))
        return true;
-    else if (get<GenericType>(there) || get<FreeType>(there) || (FFlag::LuauNormalizeBlockedTypes && get<BlockedType>(there)) ||
-             get<PendingExpansionType>(there) || get<TypeFamilyInstanceType>(there))
+    else if (get<GenericType>(there) || get<FreeType>(there) || get<BlockedType>(there) ||
+             get<PendingExpansionType>(there) || get<TypeFamilyInstanceType>(there))
    {
        if (tyvarIndex(there) <= ignoreSmallerTyvars)

@@ -1584,8 +1583,6 @@ bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, std::unor
        if (!unionNormals(here, *tn))
            return false;
    }
-    else if (!FFlag::LuauNormalizeBlockedTypes && get<BlockedType>(there))
-        LUAU_ASSERT(!"Internal error: Trying to normalize a BlockedType");
    else if (get<PendingExpansionType>(there) || get<TypeFamilyInstanceType>(there))
    {
        // nothing

@@ -2658,7 +2655,7 @@ bool Normalizer::intersectNormalWithTy(NormalizedType& here, TypeId there, std::
            return false;
        return true;
    }
-    else if (get<GenericType>(there) || get<FreeType>(there) || (FFlag::LuauNormalizeBlockedTypes && get<BlockedType>(there)) ||
-             get<PendingExpansionType>(there) || get<TypeFamilyInstanceType>(there))
+    else if (get<GenericType>(there) || get<FreeType>(there) || get<BlockedType>(there) ||
+             get<PendingExpansionType>(there) || get<TypeFamilyInstanceType>(there))
    {
        NormalizedType thereNorm{builtinTypes};

View file

@@ -438,7 +438,7 @@ SubtypingResult Subtyping::isCovariantWith(TypePackId subTp, TypePackId superTp)
            results.push_back({false});
        }
        else
-            LUAU_ASSERT(0); // TODO
+            iceReporter->ice("Subtyping test encountered the unexpected type pack: " + toString(*superTail));
    }

    return SubtypingResult::all(results);

@@ -736,13 +736,33 @@ SubtypingResult Subtyping::isCovariantWith(const TableType* subTable, const Tabl
{
    SubtypingResult result{true};

+    if (subTable->props.empty() && !subTable->indexer && superTable->indexer)
+        return {false};
+
    for (const auto& [name, prop] : superTable->props)
    {
-        auto it = subTable->props.find(name);
-        if (it != subTable->props.end())
-            result.andAlso(isInvariantWith(prop.type(), it->second.type()));
-        else
-            return SubtypingResult{false};
+        std::vector<SubtypingResult> results;
+        if (auto it = subTable->props.find(name); it != subTable->props.end())
+            results.push_back(isInvariantWith(it->second.type(), prop.type()));
+
+        if (subTable->indexer)
+        {
+            if (isInvariantWith(subTable->indexer->indexType, builtinTypes->stringType).isSubtype)
+                results.push_back(isInvariantWith(subTable->indexer->indexResultType, prop.type()));
+        }
+
+        if (results.empty())
+            return {false};
+
+        result.andAlso(SubtypingResult::all(results));
+    }
+
+    if (superTable->indexer)
+    {
+        if (subTable->indexer)
+            result.andAlso(isInvariantWith(*subTable->indexer, *superTable->indexer));
+        else
+            return {false};
    }

    return result;

@@ -750,10 +770,7 @@ SubtypingResult Subtyping::isCovariantWith(const TableType* subTable, const Tabl
SubtypingResult Subtyping::isCovariantWith(const MetatableType* subMt, const MetatableType* superMt)
{
-    return SubtypingResult::all({
-        isCovariantWith(subMt->table, superMt->table),
-        isCovariantWith(subMt->metatable, superMt->metatable),
-    });
+    return isCovariantWith(subMt->table, superMt->table).andAlso(isCovariantWith(subMt->metatable, superMt->metatable));
}

SubtypingResult Subtyping::isCovariantWith(const MetatableType* subMt, const TableType* superTable)

@@ -852,6 +869,11 @@ SubtypingResult Subtyping::isCovariantWith(const SingletonType* subSingleton, co
    return result;
}

+SubtypingResult Subtyping::isCovariantWith(const TableIndexer& subIndexer, const TableIndexer& superIndexer)
+{
+    return isInvariantWith(subIndexer.indexType, superIndexer.indexType).andAlso(isInvariantWith(superIndexer.indexResultType, subIndexer.indexResultType));
+}
+
SubtypingResult Subtyping::isCovariantWith(const NormalizedType* subNorm, const NormalizedType* superNorm)
{
    if (!subNorm || !superNorm)

View file

@@ -82,8 +82,16 @@ struct FindCyclicTypes final : TypeVisitor
        if (FFlag::DebugLuauDeferredConstraintResolution)
        {
-            traverse(ft.lowerBound);
-            traverse(ft.upperBound);
+            // TODO: Replace these if statements with assert()s when we
+            // delete FFlag::DebugLuauDeferredConstraintResolution.
+            //
+            // When the old solver is used, these pointers are always
+            // unused. When the new solver is used, they are never null.
+            if (ft.lowerBound)
+                traverse(ft.lowerBound);
+            if (ft.upperBound)
+                traverse(ft.upperBound);
        }

        return false;

@@ -442,7 +450,9 @@ struct TypeStringifier
    {
        state.result.invalid = true;

-        if (FFlag::DebugLuauDeferredConstraintResolution)
+        // TODO: ftv.lowerBound and ftv.upperBound should always be non-nil when
+        // the new solver is used. This can be replaced with an assert.
+        if (FFlag::DebugLuauDeferredConstraintResolution && ftv.lowerBound && ftv.upperBound)
        {
            const TypeId lowerBound = follow(ftv.lowerBound);
            const TypeId upperBound = follow(ftv.upperBound);

View file

@@ -25,7 +25,6 @@ LUAU_FASTINTVARIABLE(LuauTypeMaximumStringifierLength, 500)
LUAU_FASTINTVARIABLE(LuauTableTypeMaximumStringifierLength, 0)
LUAU_FASTINT(LuauTypeInferRecursionLimit)
LUAU_FASTFLAG(LuauInstantiateInSubtyping)
-LUAU_FASTFLAG(LuauNormalizeBlockedTypes)
LUAU_FASTFLAG(DebugLuauReadWriteProperties)
LUAU_FASTFLAGVARIABLE(LuauInitializeStringMetatableInGlobalTypes, false)

@@ -522,12 +521,10 @@ GenericType::GenericType(Scope* scope, const Name& name)
}

BlockedType::BlockedType()
-    : index(FFlag::LuauNormalizeBlockedTypes ? Unifiable::freshIndex() : ++DEPRECATED_nextIndex)
+    : index(Unifiable::freshIndex())
{
}

-int BlockedType::DEPRECATED_nextIndex = 0;
-
PendingExpansionType::PendingExpansionType(
    std::optional<AstName> prefix, AstName name, std::vector<TypeId> typeArguments, std::vector<TypePackId> packArguments)
    : prefix(prefix)

View file

@@ -11,6 +11,7 @@
#include "Luau/Instantiation.h"
#include "Luau/Metamethods.h"
#include "Luau/Normalize.h"
+#include "Luau/Subtyping.h"
#include "Luau/ToString.h"
#include "Luau/TxnLog.h"
#include "Luau/Type.h"

@@ -241,6 +242,7 @@ struct TypeChecker2
    DenseHashSet<TypeId> noTypeFamilyErrors{nullptr};

    Normalizer normalizer;
+    Subtyping subtyping;

    TypeChecker2(NotNull<BuiltinTypes> builtinTypes, NotNull<UnifierSharedState> unifierState, NotNull<TypeCheckLimits> limits, DcrLogger* logger,
        const SourceModule* sourceModule, Module* module)

@@ -251,6 +253,7 @@ struct TypeChecker2
        , sourceModule(sourceModule)
        , module(module)
        , normalizer{&testArena, builtinTypes, unifierState, /* cacheInhabitance */ true}
+        , subtyping{builtinTypes, NotNull{&testArena}, NotNull{&normalizer}, NotNull{unifierState->iceHandler}, NotNull{module->getModuleScope().get()}}
    {
    }

@@ -1080,10 +1083,24 @@ struct TypeChecker2
        TypeId* originalCallTy = module->astOriginalCallTypes.find(call);
        TypeId* selectedOverloadTy = module->astOverloadResolvedTypes.find(call);
-        if (!originalCallTy && !selectedOverloadTy)
+        if (!originalCallTy)
            return;

-        TypeId fnTy = follow(selectedOverloadTy ? *selectedOverloadTy : *originalCallTy);
+        TypeId fnTy = *originalCallTy;
+        if (selectedOverloadTy)
+        {
+            SubtypingResult result = subtyping.isSubtype(*originalCallTy, *selectedOverloadTy);
+            if (result.isSubtype)
+                fnTy = *selectedOverloadTy;
+
+            if (result.normalizationTooComplex)
+            {
+                reportError(NormalizationTooComplex{}, call->func->location);
+                return;
+            }
+        }
+        fnTy = follow(fnTy);
+
        if (get<AnyType>(fnTy) || get<ErrorType>(fnTy) || get<NeverType>(fnTy))
            return;
        else if (isOptional(fnTy))

View file

@@ -2,6 +2,7 @@
#include "Luau/TypeFamily.h"

+#include "Luau/ConstraintSolver.h"
#include "Luau/DenseHash.h"
#include "Luau/Instantiation.h"
#include "Luau/Normalize.h"
@@ -55,31 +56,38 @@ struct InstanceCollector : TypeOnceVisitor

struct FamilyReducer
{
+    ConstraintSolver* solver = nullptr;
+    // Conditionally from the solver if one is provided.
+    NotNull<TypeArena> arena;
+    NotNull<BuiltinTypes> builtins;
+    NotNull<Normalizer> normalizer;
+
    std::deque<TypeId> queuedTys;
    std::deque<TypePackId> queuedTps;
    DenseHashSet<const void*> irreducible{nullptr};
    FamilyGraphReductionResult result;
-    Location location;
-    NotNull<TypeArena> arena;
-    NotNull<BuiltinTypes> builtins;
    TxnLog* parentLog = nullptr;
    TxnLog log;
    bool force = false;
+
+    // Local to the constraint being reduced.
+    Location location;
    NotNull<Scope> scope;
-    NotNull<Normalizer> normalizer;

    FamilyReducer(std::deque<TypeId> queuedTys, std::deque<TypePackId> queuedTps, Location location, NotNull<TypeArena> arena,
-        NotNull<BuiltinTypes> builtins, NotNull<Scope> scope, NotNull<Normalizer> normalizer, TxnLog* parentLog = nullptr, bool force = false)
-        : queuedTys(std::move(queuedTys))
-        , queuedTps(std::move(queuedTps))
-        , location(location)
+        NotNull<BuiltinTypes> builtins, NotNull<Scope> scope, NotNull<Normalizer> normalizer, ConstraintSolver* solver, TxnLog* parentLog = nullptr, bool force = false)
+        : solver(solver)
        , arena(arena)
        , builtins(builtins)
+        , normalizer(normalizer)
+        , queuedTys(std::move(queuedTys))
+        , queuedTps(std::move(queuedTps))
        , parentLog(parentLog)
        , log(parentLog)
        , force(force)
+        , location(location)
        , scope(scope)
-        , normalizer(normalizer)
    {
    }
@@ -234,7 +242,7 @@ struct FamilyReducer
            return;

        TypeFamilyReductionResult<TypeId> result =
-            tfit->family->reducer(tfit->typeArguments, tfit->packArguments, arena, builtins, NotNull{&log}, scope, normalizer);
+            tfit->family->reducer(tfit->typeArguments, tfit->packArguments, arena, builtins, NotNull{&log}, scope, normalizer, solver);
        handleFamilyReduction(subject, result);
    }
}

@@ -253,7 +261,7 @@ struct FamilyReducer
            return;

        TypeFamilyReductionResult<TypePackId> result =
-            tfit->family->reducer(tfit->typeArguments, tfit->packArguments, arena, builtins, NotNull{&log}, scope, normalizer);
+            tfit->family->reducer(tfit->typeArguments, tfit->packArguments, arena, builtins, NotNull{&log}, scope, normalizer, solver);
        handleFamilyReduction(subject, result);
    }
}

@@ -268,9 +276,10 @@ struct FamilyReducer
};

static FamilyGraphReductionResult reduceFamiliesInternal(std::deque<TypeId> queuedTys, std::deque<TypePackId> queuedTps, Location location,
-    NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins, NotNull<Scope> scope, NotNull<Normalizer> normalizer, TxnLog* log, bool force)
+    NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins, NotNull<Scope> scope, NotNull<Normalizer> normalizer, ConstraintSolver* solver,
+    TxnLog* log, bool force)
{
-    FamilyReducer reducer{std::move(queuedTys), std::move(queuedTps), location, arena, builtins, scope, normalizer, log, force};
+    FamilyReducer reducer{std::move(queuedTys), std::move(queuedTps), location, arena, builtins, scope, normalizer, solver, log, force};
    int iterationCount = 0;

    while (!reducer.done())

@@ -305,7 +314,7 @@ FamilyGraphReductionResult reduceFamilies(TypeId entrypoint, Location location,
    if (collector.tys.empty() && collector.tps.empty())
        return {};

-    return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, arena, builtins, scope, normalizer, log, force);
+    return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, arena, builtins, scope, normalizer, nullptr, log, force);
}

FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location location, NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins,
@@ -325,16 +334,57 @@ FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location locati
    if (collector.tys.empty() && collector.tps.empty())
        return {};

-    return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, arena, builtins, scope, normalizer, log, force);
+    return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, arena, builtins, scope, normalizer, nullptr, log, force);
+}
+
+FamilyGraphReductionResult reduceFamilies(
+    NotNull<ConstraintSolver> solver, TypeId entrypoint, Location location, NotNull<Scope> scope, TxnLog* log, bool force)
+{
+    InstanceCollector collector;
+
+    try
+    {
+        collector.traverse(entrypoint);
+    }
+    catch (RecursionLimitException&)
+    {
+        return FamilyGraphReductionResult{};
+    }
+
+    if (collector.tys.empty() && collector.tps.empty())
+        return {};
+
+    return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, solver->arena, solver->builtinTypes, scope, solver->normalizer, solver.get(), log, force);
+}
+
+FamilyGraphReductionResult reduceFamilies(
+    NotNull<ConstraintSolver> solver, TypePackId entrypoint, Location location, NotNull<Scope> scope, TxnLog* log, bool force)
+{
+    InstanceCollector collector;
+
+    try
+    {
+        collector.traverse(entrypoint);
+    }
+    catch (RecursionLimitException&)
+    {
+        return FamilyGraphReductionResult{};
+    }
+
+    if (collector.tys.empty() && collector.tps.empty())
+        return {};
+
+    return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, solver->arena, solver->builtinTypes, scope, solver->normalizer, solver.get(), log, force);
}

-bool isPending(TypeId ty, NotNull<TxnLog> log)
+bool isPending(TypeId ty, NotNull<TxnLog> log, ConstraintSolver* solver)
{
-    return log->is<FreeType>(ty) || log->is<BlockedType>(ty) || log->is<PendingExpansionType>(ty) || log->is<TypeFamilyInstanceType>(ty);
+    return log->is<BlockedType>(ty) || log->is<PendingExpansionType>(ty) || log->is<TypeFamilyInstanceType>(ty)
+        || (solver && solver->hasUnresolvedConstraints(ty));
}

TypeFamilyReductionResult<TypeId> addFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeArena> arena,
-    NotNull<BuiltinTypes> builtins, NotNull<TxnLog> log, NotNull<Scope> scope, NotNull<Normalizer> normalizer)
+    NotNull<BuiltinTypes> builtins, NotNull<TxnLog> log, NotNull<Scope> scope, NotNull<Normalizer> normalizer, ConstraintSolver* solver)
{
    if (typeParams.size() != 2 || !packParams.empty())
    {
@@ -367,11 +417,11 @@ TypeFamilyReductionResult<TypeId> addFamilyFn(std::vector<TypeId> typeParams, st
    {
        return {builtins->neverType, false, {}, {}};
    }
-    else if (isPending(lhsTy, log))
+    else if (isPending(lhsTy, log, solver))
    {
        return {std::nullopt, false, {lhsTy}, {}};
    }
-    else if (isPending(rhsTy, log))
+    else if (isPending(rhsTy, log, solver))
    {
        return {std::nullopt, false, {rhsTy}, {}};
    }

@@ -391,7 +441,7 @@ TypeFamilyReductionResult<TypeId> addFamilyFn(std::vector<TypeId> typeParams, st
    if (!addMm)
        return {std::nullopt, true, {}, {}};

-    if (isPending(log->follow(*addMm), log))
+    if (isPending(log->follow(*addMm), log, solver))
        return {std::nullopt, false, {log->follow(*addMm)}, {}};

    const FunctionType* mmFtv = log->get<FunctionType>(log->follow(*addMm));

View file

@@ -326,7 +326,7 @@ ErrorSuppression shouldSuppressErrors(NotNull<Normalizer> normalizer, TypePackId
    }

    // check the tail if we have one and it's finite
-    if (tail && finite(*tail))
+    if (tail && tp != tail && finite(*tail))
        return shouldSuppressErrors(normalizer, *tail);

    return ErrorSuppression::DoNotSuppress;

View file

@@ -22,7 +22,6 @@ LUAU_FASTFLAGVARIABLE(LuauInstantiateInSubtyping, false)
LUAU_FASTFLAGVARIABLE(LuauMaintainScopesInUnifier, false)
LUAU_FASTFLAGVARIABLE(LuauTransitiveSubtyping, false)
LUAU_FASTFLAGVARIABLE(LuauOccursIsntAlwaysFailure, false)
-LUAU_FASTFLAG(LuauNormalizeBlockedTypes)
LUAU_FASTFLAG(LuauAlwaysCommitInferencesOfFunctionCalls)
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
LUAU_FASTFLAGVARIABLE(LuauFixIndexerSubtypingOrdering, false)

@@ -607,7 +606,7 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
        const NormalizedType* superNorm = normalizer->normalize(superTy);
        if (!superNorm)
-            return reportError(location, UnificationTooComplex{});
+            return reportError(location, NormalizationTooComplex{});
        if (!log.get<AnyType>(superNorm->tops))
            failure = true;
@@ -850,32 +849,6 @@ void Unifier::tryUnifyUnionWithType(TypeId subTy, const UnionType* subUnion, Typ
    }
}

-struct DEPRECATED_BlockedTypeFinder : TypeOnceVisitor
-{
-    std::unordered_set<TypeId> blockedTypes;
-
-    bool visit(TypeId ty, const BlockedType&) override
-    {
-        blockedTypes.insert(ty);
-        return true;
-    }
-};
-
-bool Unifier::DEPRECATED_blockOnBlockedTypes(TypeId subTy, TypeId superTy)
-{
-    LUAU_ASSERT(!FFlag::LuauNormalizeBlockedTypes);
-
-    DEPRECATED_BlockedTypeFinder blockedTypeFinder;
-    blockedTypeFinder.traverse(subTy);
-    blockedTypeFinder.traverse(superTy);
-    if (!blockedTypeFinder.blockedTypes.empty())
-    {
-        blockedTypes.insert(end(blockedTypes), begin(blockedTypeFinder.blockedTypes), end(blockedTypeFinder.blockedTypes));
-        return true;
-    }
-
-    return false;
-}
-
void Unifier::tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionType* uv, bool cacheEnabled, bool isFunctionCall)
{
    // T <: A | B if T <: A or T <: B
@@ -1001,7 +974,7 @@ void Unifier::tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionTyp
        const NormalizedType* superNorm = normalizer->normalize(superTy);
        Unifier innerState = makeChildUnifier();
        if (!subNorm || !superNorm)
-            return reportError(location, UnificationTooComplex{});
+            return reportError(location, NormalizationTooComplex{});
        else if ((failedOptionCount == 1 || foundHeuristic) && failedOption)
            innerState.tryUnifyNormalizedTypes(
                subTy, superTy, *subNorm, *superNorm, "None of the union options are compatible. For example:", *failedOption);

@@ -1016,18 +989,13 @@ void Unifier::tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionTyp
    }
    else if (!found && normalize)
    {
-        // We cannot normalize a type that contains blocked types. We have to
-        // stop for now if we find any.
-        if (!FFlag::LuauNormalizeBlockedTypes && DEPRECATED_blockOnBlockedTypes(subTy, superTy))
-            return;
-
        // It is possible that T <: A | B even though T </: A and T </:B
        // for example boolean <: true | false.
        // We deal with this by type normalization.
        const NormalizedType* subNorm = normalizer->normalize(subTy);
        const NormalizedType* superNorm = normalizer->normalize(superTy);
        if (!subNorm || !superNorm)
-            reportError(location, UnificationTooComplex{});
+            reportError(location, NormalizationTooComplex{});
        else if ((failedOptionCount == 1 || foundHeuristic) && failedOption)
            tryUnifyNormalizedTypes(subTy, superTy, *subNorm, *superNorm, "None of the union options are compatible. For example:", *failedOption);
        else
@@ -1125,11 +1093,6 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*

    if (useNewSolver && normalize)
    {
-        // We cannot normalize a type that contains blocked types. We have to
-        // stop for now if we find any.
-        if (!FFlag::LuauNormalizeBlockedTypes && DEPRECATED_blockOnBlockedTypes(subTy, superTy))
-            return;
-
        // Sometimes a negation type is inside one of the types, e.g. { p: number } & { p: ~number }.
        NegationTypeFinder finder;
        finder.traverse(subTy);

@@ -1144,7 +1107,7 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*
            if (subNorm && superNorm)
                tryUnifyNormalizedTypes(subTy, superTy, *subNorm, *superNorm, "none of the intersection parts are compatible");
            else
-                reportError(location, UnificationTooComplex{});
+                reportError(location, NormalizationTooComplex{});

            return;
        }

@@ -1190,11 +1153,6 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*
        reportError(*unificationTooComplex);
    else if (!found && normalize)
    {
-        // We cannot normalize a type that contains blocked types. We have to
-        // stop for now if we find any.
-        if (!FFlag::LuauNormalizeBlockedTypes && DEPRECATED_blockOnBlockedTypes(subTy, superTy))
-            return;
-
        // It is possible that A & B <: T even though A </: T and B </: T
        // for example string? & number? <: nil.
        // We deal with this by type normalization.

@@ -1203,7 +1161,7 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*
        if (subNorm && superNorm)
            tryUnifyNormalizedTypes(subTy, superTy, *subNorm, *superNorm, "none of the intersection parts are compatible");
        else
-            reportError(location, UnificationTooComplex{});
+            reportError(location, NormalizationTooComplex{});
    }
    else if (!found)
    {
@@ -2714,15 +2672,10 @@ void Unifier::tryUnifyNegations(TypeId subTy, TypeId superTy)
    if (!log.get<NegationType>(subTy) && !log.get<NegationType>(superTy))
        ice("tryUnifyNegations superTy or subTy must be a negation type");

-    // We cannot normalize a type that contains blocked types. We have to
-    // stop for now if we find any.
-    if (!FFlag::LuauNormalizeBlockedTypes && DEPRECATED_blockOnBlockedTypes(subTy, superTy))
-        return;
-
    const NormalizedType* subNorm = normalizer->normalize(subTy);
    const NormalizedType* superNorm = normalizer->normalize(superTy);
    if (!subNorm || !superNorm)
-        return reportError(location, UnificationTooComplex{});
+        return reportError(location, NormalizationTooComplex{});

    // T </: ~U iff T <: U
    Unifier state = makeChildUnifier();

View file

@@ -2,6 +2,7 @@
#include "Luau/Unifier2.h"

+#include "Luau/Instantiation.h"
#include "Luau/Scope.h"
#include "Luau/Simplify.h"
#include "Luau/Substitution.h"

@@ -9,10 +10,12 @@
#include "Luau/TxnLog.h"
#include "Luau/Type.h"
#include "Luau/TypeArena.h"
+#include "Luau/TypeCheckLimits.h"
#include "Luau/TypeUtils.h"
#include "Luau/VisitType.h"

#include <algorithm>
+#include <optional>
#include <unordered_set>

LUAU_FASTINT(LuauTypeInferRecursionLimit)
@@ -20,10 +23,12 @@ LUAU_FASTINT(LuauTypeInferRecursionLimit)
namespace Luau
{

-Unifier2::Unifier2(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<InternalErrorReporter> ice)
+Unifier2::Unifier2(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope, NotNull<InternalErrorReporter> ice)
    : arena(arena)
    , builtinTypes(builtinTypes)
+    , scope(scope)
    , ice(ice)
+    , limits(TypeCheckLimits{}) // TODO: typecheck limits in unifier2
    , recursionLimit(FInt::LuauTypeInferRecursionLimit)
{
}

@@ -33,6 +38,10 @@ bool Unifier2::unify(TypeId subTy, TypeId superTy)
    subTy = follow(subTy);
    superTy = follow(superTy);

+    if (seenTypePairings.contains({subTy, superTy}))
+        return true;
+    seenTypePairings.insert({subTy, superTy});
+
    if (subTy == superTy)
        return true;
@ -48,20 +57,211 @@ bool Unifier2::unify(TypeId subTy, TypeId superTy)
if (subFree || superFree) if (subFree || superFree)
return true; return true;
const FunctionType* subFn = get<FunctionType>(subTy); auto subFn = get<FunctionType>(subTy);
const FunctionType* superFn = get<FunctionType>(superTy); auto superFn = get<FunctionType>(superTy);
if (subFn && superFn) if (subFn && superFn)
return unify(subTy, superFn);
auto subUnion = get<UnionType>(subTy);
auto superUnion = get<UnionType>(superTy);
if (subUnion)
return unify(subUnion, superTy);
else if (superUnion)
return unify(subTy, superUnion);
auto subIntersection = get<IntersectionType>(subTy);
auto superIntersection = get<IntersectionType>(superTy);
if (subIntersection)
return unify(subIntersection, superTy);
else if (superIntersection)
return unify(subTy, superIntersection);
auto subNever = get<NeverType>(subTy);
auto superNever = get<NeverType>(superTy);
if (subNever && superNever)
return true;
else if (subNever && superFn)
{ {
bool argResult = unify(superFn->argTypes, subFn->argTypes); // If `never` is the subtype, then we can propagate that inward.
bool retResult = unify(subFn->retTypes, superFn->retTypes); bool argResult = unify(superFn->argTypes, builtinTypes->neverTypePack);
bool retResult = unify(builtinTypes->neverTypePack, superFn->retTypes);
return argResult && retResult;
}
else if (subFn && superNever)
{
// If `never` is the supertype, then we can propagate that inward.
bool argResult = unify(builtinTypes->neverTypePack, subFn->argTypes);
bool retResult = unify(subFn->retTypes, builtinTypes->neverTypePack);
return argResult && retResult; return argResult && retResult;
} }
auto subAny = get<AnyType>(subTy);
auto superAny = get<AnyType>(superTy);
if (subAny && superAny)
return true;
else if (subAny && superFn)
{
// If `any` is the subtype, then we can propagate that inward.
bool argResult = unify(superFn->argTypes, builtinTypes->anyTypePack);
bool retResult = unify(builtinTypes->anyTypePack, superFn->retTypes);
return argResult && retResult;
}
else if (subFn && superAny)
{
// If `any` is the supertype, then we can propagate that inward.
bool argResult = unify(builtinTypes->anyTypePack, subFn->argTypes);
bool retResult = unify(subFn->retTypes, builtinTypes->anyTypePack);
return argResult && retResult;
}
auto subTable = get<TableType>(subTy);
auto superTable = get<TableType>(superTy);
if (subTable && superTable)
{
// `boundTo` works like a bound type, and therefore we'd replace it
// with the `boundTo` and try unification again.
//
// However, these pointers should have been chased already by follow().
LUAU_ASSERT(!subTable->boundTo);
LUAU_ASSERT(!superTable->boundTo);
return unify(subTable, superTable);
}
auto subMetatable = get<MetatableType>(subTy);
auto superMetatable = get<MetatableType>(superTy);
if (subMetatable && superMetatable)
return unify(subMetatable, superMetatable);
else if (subMetatable) // if we only have one metatable, unify with the inner table
return unify(subMetatable->table, superTy);
else if (superMetatable) // if we only have one metatable, unify with the inner table
return unify(subTy, superMetatable->table);
auto [subNegation, superNegation] = get2<NegationType, NegationType>(subTy, superTy);
if (subNegation && superNegation)
return unify(subNegation->ty, superNegation->ty);
// The unification failed, but we're not doing type checking. // The unification failed, but we're not doing type checking.
return true; return true;
} }
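The seenTypePairings set added above (and seenTypePackPairings for packs) is what keeps unify total on recursive and cyclic type graphs: once a (subTy, superTy) pair has been entered, revisiting it succeeds immediately instead of recursing forever. A minimal sketch of the idea, using standard containers rather than Luau's DenseHashSet (illustrative only, not the real implementation):

// Sketch: memoize visited (sub, super) pairs so that unification of cyclic
// type graphs terminates. Node is a stand-in for a real type representation.
#include <set>
#include <utility>

struct Node;

using SeenPairings = std::set<std::pair<const Node*, const Node*>>;

bool unifySketch(const Node* subTy, const Node* superTy, SeenPairings& seen)
{
    // A pair we have already started unifying is assumed to succeed; this is
    // the step that breaks the recursion when a type refers back to itself.
    if (!seen.insert({subTy, superTy}).second)
        return true;

    // ... structural decomposition of subTy against superTy goes here ...
    return true;
}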
bool Unifier2::unify(TypeId subTy, const FunctionType* superFn) {
const FunctionType* subFn = get<FunctionType>(subTy);
bool shouldInstantiate =
(superFn->generics.empty() && !subFn->generics.empty()) || (superFn->genericPacks.empty() && !subFn->genericPacks.empty());
if (shouldInstantiate)
{
std::optional<TypeId> instantiated = instantiate(builtinTypes, arena, NotNull{&limits}, scope, subTy);
subFn = get<FunctionType>(*instantiated);
LUAU_ASSERT(subFn); // instantiation should not make a function type _not_ a function type.
}
bool argResult = unify(superFn->argTypes, subFn->argTypes);
bool retResult = unify(subFn->retTypes, superFn->retTypes);
return argResult && retResult;
}
bool Unifier2::unify(const UnionType* subUnion, TypeId superTy)
{
bool result = true;
// if the occurs check fails for any option, it fails overall
for (auto subOption : subUnion->options)
result &= unify(subOption, superTy);
return result;
}
bool Unifier2::unify(TypeId subTy, const UnionType* superUnion)
{
bool result = true;
// if the occurs check fails for any option, it fails overall
for (auto superOption : superUnion->options)
result &= unify(subTy, superOption);
return result;
}
bool Unifier2::unify(const IntersectionType* subIntersection, TypeId superTy)
{
bool result = true;
// if the occurs check fails for any part, it fails overall
for (auto subPart : subIntersection->parts)
result &= unify(subPart, superTy);
return result;
}
bool Unifier2::unify(TypeId subTy, const IntersectionType* superIntersection)
{
bool result = true;
// if the occurs check fails for any part, it fails overall
for (auto superPart : superIntersection->parts)
result &= unify(subTy, superPart);
return result;
}
bool Unifier2::unify(const TableType* subTable, const TableType* superTable)
{
bool result = true;
// It suffices to only check one direction of properties since we'll only ever have work to do during unification
// if the property is present in both table types.
for (const auto& [propName, subProp] : subTable->props)
{
auto superPropOpt = superTable->props.find(propName);
if (superPropOpt != superTable->props.end())
result &= unify(subProp.type(), superPropOpt->second.type());
}
auto subTypeParamsIter = subTable->instantiatedTypeParams.begin();
auto superTypeParamsIter = superTable->instantiatedTypeParams.begin();
while (subTypeParamsIter != subTable->instantiatedTypeParams.end() && superTypeParamsIter != superTable->instantiatedTypeParams.end())
{
result &= unify(*subTypeParamsIter, *superTypeParamsIter);
subTypeParamsIter++;
superTypeParamsIter++;
}
auto subTypePackParamsIter = subTable->instantiatedTypePackParams.begin();
auto superTypePackParamsIter = superTable->instantiatedTypePackParams.begin();
while (subTypePackParamsIter != subTable->instantiatedTypePackParams.end() &&
superTypePackParamsIter != superTable->instantiatedTypePackParams.end())
{
result &= unify(*subTypePackParamsIter, *superTypePackParamsIter);
subTypePackParamsIter++;
superTypePackParamsIter++;
}
if (subTable->selfTy && superTable->selfTy)
result &= unify(*subTable->selfTy, *superTable->selfTy);
if (subTable->indexer && superTable->indexer)
{
result &= unify(subTable->indexer->indexType, superTable->indexer->indexType);
result &= unify(subTable->indexer->indexResultType, superTable->indexer->indexResultType);
}
return result;
}
bool Unifier2::unify(const MetatableType* subMetatable, const MetatableType* superMetatable)
{
return unify(subMetatable->metatable, superMetatable->metatable) && unify(subMetatable->table, superMetatable->table);
}
// FIXME? This should probably return an ErrorVec or an optional<TypeError> // FIXME? This should probably return an ErrorVec or an optional<TypeError>
// rather than a boolean to signal an occurs check failure. // rather than a boolean to signal an occurs check failure.
bool Unifier2::unify(TypePackId subTp, TypePackId superTp) bool Unifier2::unify(TypePackId subTp, TypePackId superTp)
@ -69,6 +269,10 @@ bool Unifier2::unify(TypePackId subTp, TypePackId superTp)
subTp = follow(subTp); subTp = follow(subTp);
superTp = follow(superTp); superTp = follow(superTp);
if (seenTypePackPairings.contains({subTp, superTp}))
return true;
seenTypePackPairings.insert({subTp, superTp});
const FreeTypePack* subFree = get<FreeTypePack>(subTp); const FreeTypePack* subFree = get<FreeTypePack>(subTp);
const FreeTypePack* superFree = get<FreeTypePack>(superTp); const FreeTypePack* superFree = get<FreeTypePack>(superTp);
@ -103,12 +307,28 @@ bool Unifier2::unify(TypePackId subTp, TypePackId superTp)
auto [subTypes, subTail] = extendTypePack(*arena, builtinTypes, subTp, maxLength); auto [subTypes, subTail] = extendTypePack(*arena, builtinTypes, subTp, maxLength);
auto [superTypes, superTail] = extendTypePack(*arena, builtinTypes, superTp, maxLength); auto [superTypes, superTail] = extendTypePack(*arena, builtinTypes, superTp, maxLength);
// right-pad the subpack with nils if `superPack` is larger since that's what a function call does
if (subTypes.size() < maxLength)
{
for (size_t i = 0; i <= maxLength - subTypes.size(); i++)
subTypes.push_back(builtinTypes->nilType);
}
if (subTypes.size() < maxLength || superTypes.size() < maxLength) if (subTypes.size() < maxLength || superTypes.size() < maxLength)
return true; return true;
for (size_t i = 0; i < maxLength; ++i) for (size_t i = 0; i < maxLength; ++i)
unify(subTypes[i], superTypes[i]); unify(subTypes[i], superTypes[i]);
if (!subTail || !superTail)
return true;
TypePackId followedSubTail = follow(*subTail);
TypePackId followedSuperTail = follow(*superTail);
if (get<FreeTypePack>(followedSubTail) || get<FreeTypePack>(followedSuperTail))
return unify(followedSubTail, followedSuperTail);
return true; return true;
} }
@ -141,8 +361,8 @@ struct FreeTypeSearcher : TypeVisitor
} }
} }
std::unordered_set<TypeId> negativeTypes; DenseHashMap<TypeId, size_t> negativeTypes{0};
std::unordered_set<TypeId> positiveTypes; DenseHashMap<TypeId, size_t> positiveTypes{0};
bool visit(TypeId ty) override bool visit(TypeId ty) override
{ {
@ -158,16 +378,34 @@ struct FreeTypeSearcher : TypeVisitor
switch (polarity) switch (polarity)
{ {
case Positive: case Positive:
positiveTypes.insert(ty); positiveTypes[ty]++;
break; break;
case Negative: case Negative:
negativeTypes.insert(ty); negativeTypes[ty]++;
break; break;
} }
return true; return true;
} }
bool visit(TypeId ty, const TableType& tt) override
{
if ((tt.state == TableState::Free || tt.state == TableState::Unsealed) && subsumes(scope, tt.scope))
{
switch (polarity)
{
case Positive:
positiveTypes[ty]++;
break;
case Negative:
negativeTypes[ty]++;
break;
}
}
return true;
}
bool visit(TypeId ty, const FunctionType& ft) override bool visit(TypeId ty, const FunctionType& ft) override
{ {
flip(); flip();
@ -185,14 +423,15 @@ struct MutatingGeneralizer : TypeOnceVisitor
NotNull<BuiltinTypes> builtinTypes; NotNull<BuiltinTypes> builtinTypes;
NotNull<Scope> scope; NotNull<Scope> scope;
std::unordered_set<TypeId> positiveTypes; DenseHashMap<TypeId, size_t> positiveTypes;
std::unordered_set<TypeId> negativeTypes; DenseHashMap<TypeId, size_t> negativeTypes;
std::vector<TypeId> generics; std::vector<TypeId> generics;
std::vector<TypePackId> genericPacks;
bool isWithinFunction = false; bool isWithinFunction = false;
MutatingGeneralizer( MutatingGeneralizer(
NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope, std::unordered_set<TypeId> positiveTypes, std::unordered_set<TypeId> negativeTypes) NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope, DenseHashMap<TypeId, size_t> positiveTypes, DenseHashMap<TypeId, size_t> negativeTypes)
: TypeOnceVisitor(/* skipBoundTypes */ true) : TypeOnceVisitor(/* skipBoundTypes */ true)
, builtinTypes(builtinTypes) , builtinTypes(builtinTypes)
, scope(scope) , scope(scope)
@ -263,10 +502,10 @@ struct MutatingGeneralizer : TypeOnceVisitor
if (!ft) if (!ft)
return false; return false;
const bool isPositive = positiveTypes.count(ty); const bool positiveCount = getCount(positiveTypes, ty);
const bool isNegative = negativeTypes.count(ty); const bool negativeCount = getCount(negativeTypes, ty);
if (!isPositive && !isNegative) if (!positiveCount && !negativeCount)
return false; return false;
const bool hasLowerBound = !get<NeverType>(follow(ft->lowerBound)); const bool hasLowerBound = !get<NeverType>(follow(ft->lowerBound));
@ -277,13 +516,13 @@ struct MutatingGeneralizer : TypeOnceVisitor
if (!hasLowerBound && !hasUpperBound) if (!hasLowerBound && !hasUpperBound)
{ {
if (isWithinFunction) if (!isWithinFunction || (positiveCount + negativeCount == 1))
emplaceType<BoundType>(asMutable(ty), builtinTypes->unknownType);
else
{ {
emplaceType<GenericType>(asMutable(ty), scope); emplaceType<GenericType>(asMutable(ty), scope);
generics.push_back(ty); generics.push_back(ty);
} }
else
emplaceType<BoundType>(asMutable(ty), builtinTypes->unknownType);
} }
// It is possible that this free type has other free types in its upper // It is possible that this free type has other free types in its upper
@ -292,7 +531,7 @@ struct MutatingGeneralizer : TypeOnceVisitor
// bound). // bound).
// //
// If we do not do this, we get tautological bounds like a <: a <: unknown. // If we do not do this, we get tautological bounds like a <: a <: unknown.
else if (isPositive && !hasUpperBound) else if (positiveCount && !hasUpperBound)
{ {
TypeId lb = follow(ft->lowerBound); TypeId lb = follow(ft->lowerBound);
if (FreeType* lowerFree = getMutable<FreeType>(lb); lowerFree && lowerFree->upperBound == ty) if (FreeType* lowerFree = getMutable<FreeType>(lb); lowerFree && lowerFree->upperBound == ty)
@ -319,9 +558,56 @@ struct MutatingGeneralizer : TypeOnceVisitor
return false; return false;
} }
size_t getCount(const DenseHashMap<TypeId, size_t>& map, TypeId ty)
{
if (const size_t* count = map.find(ty))
return *count;
else
return 0;
}
bool visit(TypeId ty, const TableType&) override
{
const size_t positiveCount = getCount(positiveTypes, ty);
const size_t negativeCount = getCount(negativeTypes, ty);
// FIXME: Free tables should probably just be replaced by upper bounds on free types.
//
// eg never <: 'a <: {x: number} & {z: boolean}
if (!positiveCount && !negativeCount)
return true;
TableType* tt = getMutable<TableType>(ty);
LUAU_ASSERT(tt);
// We only unseal tables if they occur within function argument or
// return lists. In principle, we could always seal tables when
// generalizing, but that would mean that we'd lose the ability to
// report the existence of unsealed tables via things like hovertype.
if (tt->state == TableState::Unsealed && !isWithinFunction)
return true;
tt->state = TableState::Sealed;
return true;
}
bool visit(TypePackId tp, const FreeTypePack& ftp) override
{
if (!subsumes(scope, ftp.scope))
return true;
asMutable(tp)->ty.emplace<GenericTypePack>(scope);
genericPacks.push_back(tp);
return true;
}
}; };
std::optional<TypeId> Unifier2::generalize(NotNull<Scope> scope, TypeId ty) std::optional<TypeId> Unifier2::generalize(TypeId ty)
{ {
ty = follow(ty); ty = follow(ty);
@ -345,7 +631,10 @@ std::optional<TypeId> Unifier2::generalize(NotNull<Scope> scope, TypeId ty)
FunctionType* ftv = getMutable<FunctionType>(follow(*res)); FunctionType* ftv = getMutable<FunctionType>(follow(*res));
if (ftv) if (ftv)
{
ftv->generics = std::move(gen.generics); ftv->generics = std::move(gen.generics);
ftv->genericPacks = std::move(gen.genericPacks);
}
return res; return res;
} }
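Two changes above work together: FreeTypeSearcher now counts occurrences per polarity (DenseHashMap<TypeId, size_t>) instead of merely recording membership, and MutatingGeneralizer consults those counts before deciding what to do with a free type that has neither a lower nor an upper bound. A standalone restatement of that decision, with illustrative names that are not part of Luau's API:

#include <cstddef>

enum class FreeTypeFate
{
    BindToUnknown, // emplaceType<BoundType>(..., builtinTypes->unknownType)
    MakeGeneric    // emplaceType<GenericType>(..., scope); generics.push_back(ty)
};

// Mirrors the branch in MutatingGeneralizer::visit for bound-less free types.
FreeTypeFate decideFate(bool isWithinFunction, std::size_t positiveCount, std::size_t negativeCount)
{
    // Outside a function signature, or when the free type was recorded only
    // once, there is nothing worth quantifying over: pin it to `unknown`.
    if (!isWithinFunction || (positiveCount + negativeCount == 1))
        return FreeTypeFate::BindToUnknown;

    return FreeTypeFate::MakeGeneric;
}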

View file

@ -220,6 +220,8 @@ if(LUAU_BUILD_TESTS)
target_compile_options(Luau.Conformance PRIVATE ${LUAU_OPTIONS}) target_compile_options(Luau.Conformance PRIVATE ${LUAU_OPTIONS})
target_include_directories(Luau.Conformance PRIVATE extern) target_include_directories(Luau.Conformance PRIVATE extern)
target_link_libraries(Luau.Conformance PRIVATE Luau.Analysis Luau.Compiler Luau.CodeGen Luau.VM) target_link_libraries(Luau.Conformance PRIVATE Luau.Analysis Luau.Compiler Luau.CodeGen Luau.VM)
file(REAL_PATH "tests/conformance" LUAU_CONFORMANCE_SOURCE_DIR)
target_compile_definitions(Luau.Conformance PRIVATE LUAU_CONFORMANCE_SOURCE_DIR="${LUAU_CONFORMANCE_SOURCE_DIR}")
target_compile_options(Luau.CLI.Test PRIVATE ${LUAU_OPTIONS}) target_compile_options(Luau.CLI.Test PRIVATE ${LUAU_OPTIONS})
target_include_directories(Luau.CLI.Test PRIVATE extern CLI) target_include_directories(Luau.CLI.Test PRIVATE extern CLI)

View file

@ -99,6 +99,7 @@ struct ScopedRegX64
ScopedRegX64(const ScopedRegX64&) = delete; ScopedRegX64(const ScopedRegX64&) = delete;
ScopedRegX64& operator=(const ScopedRegX64&) = delete; ScopedRegX64& operator=(const ScopedRegX64&) = delete;
void take(RegisterX64 reg);
void alloc(SizeX64 size); void alloc(SizeX64 size);
void free(); void free();

View file

@ -0,0 +1,224 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#pragma once
#include "Luau/Common.h"
#include "Luau/IrData.h"
namespace Luau
{
namespace CodeGen
{
template<typename T>
static void visitVmRegDefsUses(T& visitor, IrFunction& function, const IrInst& inst)
{
// For correct analysis, all instruction uses must be handled before handling the definitions
switch (inst.cmd)
{
case IrCmd::LOAD_TAG:
case IrCmd::LOAD_POINTER:
case IrCmd::LOAD_DOUBLE:
case IrCmd::LOAD_INT:
case IrCmd::LOAD_TVALUE:
visitor.maybeUse(inst.a); // Argument can also be a VmConst
break;
case IrCmd::STORE_TAG:
case IrCmd::STORE_POINTER:
case IrCmd::STORE_DOUBLE:
case IrCmd::STORE_INT:
case IrCmd::STORE_VECTOR:
case IrCmd::STORE_TVALUE:
case IrCmd::STORE_SPLIT_TVALUE:
visitor.maybeDef(inst.a); // Argument can also be a pointer value
break;
case IrCmd::CMP_ANY:
visitor.use(inst.a);
visitor.use(inst.b);
break;
case IrCmd::JUMP_IF_TRUTHY:
case IrCmd::JUMP_IF_FALSY:
visitor.use(inst.a);
break;
// A <- B, C
case IrCmd::DO_ARITH:
case IrCmd::GET_TABLE:
visitor.use(inst.b);
visitor.maybeUse(inst.c); // Argument can also be a VmConst
visitor.def(inst.a);
break;
case IrCmd::SET_TABLE:
visitor.use(inst.a);
visitor.use(inst.b);
visitor.maybeUse(inst.c); // Argument can also be a VmConst
break;
// A <- B
case IrCmd::DO_LEN:
visitor.use(inst.b);
visitor.def(inst.a);
break;
case IrCmd::GET_IMPORT:
visitor.def(inst.a);
break;
case IrCmd::CONCAT:
visitor.useRange(vmRegOp(inst.a), function.uintOp(inst.b));
visitor.defRange(vmRegOp(inst.a), function.uintOp(inst.b));
break;
case IrCmd::GET_UPVALUE:
visitor.def(inst.a);
break;
case IrCmd::SET_UPVALUE:
visitor.use(inst.b);
break;
case IrCmd::INTERRUPT:
break;
case IrCmd::BARRIER_OBJ:
case IrCmd::BARRIER_TABLE_FORWARD:
visitor.maybeUse(inst.b);
break;
case IrCmd::CLOSE_UPVALS:
// Closing an upvalue should be counted as a register use (it copies the fresh register value)
// But we lack the required information about the specific set of registers that are affected
// Because we don't plan to optimize captured registers atm, we skip full dataflow analysis for them right now
break;
case IrCmd::CAPTURE:
visitor.maybeUse(inst.a);
if (function.uintOp(inst.b) == 1)
visitor.capture(vmRegOp(inst.a));
break;
case IrCmd::SETLIST:
visitor.use(inst.b);
visitor.useRange(vmRegOp(inst.c), function.intOp(inst.d));
break;
case IrCmd::CALL:
visitor.use(inst.a);
visitor.useRange(vmRegOp(inst.a) + 1, function.intOp(inst.b));
visitor.defRange(vmRegOp(inst.a), function.intOp(inst.c));
break;
case IrCmd::RETURN:
visitor.useRange(vmRegOp(inst.a), function.intOp(inst.b));
break;
// TODO: FASTCALL is more restrictive than INVOKE_FASTCALL; we should either determine the exact semantics, or rework it
case IrCmd::FASTCALL:
case IrCmd::INVOKE_FASTCALL:
if (int count = function.intOp(inst.e); count != -1)
{
if (count >= 3)
{
LUAU_ASSERT(inst.d.kind == IrOpKind::VmReg && vmRegOp(inst.d) == vmRegOp(inst.c) + 1);
visitor.useRange(vmRegOp(inst.c), count);
}
else
{
if (count >= 1)
visitor.use(inst.c);
if (count >= 2)
visitor.maybeUse(inst.d); // Argument can also be a VmConst
}
}
else
{
visitor.useVarargs(vmRegOp(inst.c));
}
// Multiple return sequences (count == -1) are defined by ADJUST_STACK_TO_REG
if (int count = function.intOp(inst.f); count != -1)
visitor.defRange(vmRegOp(inst.b), count);
break;
case IrCmd::FORGLOOP:
// First register is not used by instruction, we check that it's still 'nil' with CHECK_TAG
visitor.use(inst.a, 1);
visitor.use(inst.a, 2);
visitor.def(inst.a, 2);
visitor.defRange(vmRegOp(inst.a) + 3, function.intOp(inst.b));
break;
case IrCmd::FORGLOOP_FALLBACK:
visitor.useRange(vmRegOp(inst.a), 3);
visitor.def(inst.a, 2);
visitor.defRange(vmRegOp(inst.a) + 3, uint8_t(function.intOp(inst.b))); // ignore most significant bit
break;
case IrCmd::FORGPREP_XNEXT_FALLBACK:
visitor.use(inst.b);
break;
case IrCmd::FALLBACK_GETGLOBAL:
visitor.def(inst.b);
break;
case IrCmd::FALLBACK_SETGLOBAL:
visitor.use(inst.b);
break;
case IrCmd::FALLBACK_GETTABLEKS:
visitor.use(inst.c);
visitor.def(inst.b);
break;
case IrCmd::FALLBACK_SETTABLEKS:
visitor.use(inst.b);
visitor.use(inst.c);
break;
case IrCmd::FALLBACK_NAMECALL:
visitor.use(inst.c);
visitor.defRange(vmRegOp(inst.b), 2);
break;
case IrCmd::FALLBACK_PREPVARARGS:
// No effect on explicitly referenced registers
break;
case IrCmd::FALLBACK_GETVARARGS:
visitor.defRange(vmRegOp(inst.b), function.intOp(inst.c));
break;
case IrCmd::FALLBACK_DUPCLOSURE:
visitor.def(inst.b);
break;
case IrCmd::FALLBACK_FORGPREP:
visitor.use(inst.b);
visitor.defRange(vmRegOp(inst.b), 3);
break;
case IrCmd::ADJUST_STACK_TO_REG:
visitor.defRange(vmRegOp(inst.a), -1);
break;
case IrCmd::ADJUST_STACK_TO_TOP:
// While this can be considered to be a vararg consumer, it is already handled in fastcall instructions
break;
case IrCmd::GET_TYPEOF:
visitor.use(inst.a);
break;
case IrCmd::FINDUPVAL:
visitor.use(inst.a);
break;
default:
// All instructions which reference registers have to be handled explicitly
LUAU_ASSERT(inst.a.kind != IrOpKind::VmReg);
LUAU_ASSERT(inst.b.kind != IrOpKind::VmReg);
LUAU_ASSERT(inst.c.kind != IrOpKind::VmReg);
LUAU_ASSERT(inst.d.kind != IrOpKind::VmReg);
LUAU_ASSERT(inst.e.kind != IrOpKind::VmReg);
LUAU_ASSERT(inst.f.kind != IrOpKind::VmReg);
break;
}
}
template<typename T>
static void visitVmRegDefsUses(T& visitor, IrFunction& function, const IrBlock& block)
{
for (uint32_t instIdx = block.start; instIdx <= block.finish; instIdx++)
{
IrInst& inst = function.instructions[instIdx];
visitVmRegDefsUses(visitor, function, inst);
}
}
} // namespace CodeGen
} // namespace Luau
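visitVmRegDefsUses only requires its visitor argument to expose a small callback surface, dictated entirely by the call sites above. A minimal visitor that would compile against it might look like the following; the member signatures are inferred from those call sites (illustrative, not part of the codebase, and it assumes the Luau::CodeGen namespace plus the includes at the top of this header):

// Counts VM register uses and definitions for one instruction or block.
struct RegUseCounter
{
    int uses = 0;
    int defs = 0;

    // Some call sites pass an extra index, e.g. visitor.use(inst.a, 1).
    void use(IrOp, int /*index*/ = 0) { uses++; }
    void def(IrOp, int /*index*/ = 0) { defs++; }

    // 'maybe' variants receive operands that may not be VM registers (constants, pointers).
    void maybeUse(IrOp op)
    {
        if (op.kind == IrOpKind::VmReg)
            uses++;
    }
    void maybeDef(IrOp op)
    {
        if (op.kind == IrOpKind::VmReg)
            defs++;
    }

    // Note: ADJUST_STACK_TO_REG calls defRange with count == -1 ("to stack top").
    void useRange(int /*start*/, int count) { uses += count; }
    void defRange(int /*start*/, int count) { defs += count > 0 ? count : 0; }

    void useVarargs(int /*start*/) { uses++; }
    void capture(int /*reg*/) {}
};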

View file

@ -365,16 +365,9 @@ const Instruction* executeGETTABLEKS(lua_State* L, const Instruction* pc, StkId
{ {
Table* h = hvalue(rb); Table* h = hvalue(rb);
int slot = LUAU_INSN_C(insn) & h->nodemask8; // we ignore the fast path that checks for the cached slot since IrTranslation already checks for it.
LuaNode* n = &h->node[slot];
// fast-path: value is in expected slot if (!h->metatable)
if (LUAU_LIKELY(ttisstring(gkey(n)) && tsvalue(gkey(n)) == tsvalue(kv) && !ttisnil(gval(n))))
{
setobj2s(L, ra, gval(n));
return pc;
}
else if (!h->metatable)
{ {
// fast-path: value is not in expected slot, but the table lookup doesn't involve metatable // fast-path: value is not in expected slot, but the table lookup doesn't involve metatable
const TValue* res = luaH_getstr(h, tsvalue(kv)); const TValue* res = luaH_getstr(h, tsvalue(kv));
@ -392,6 +385,7 @@ const Instruction* executeGETTABLEKS(lua_State* L, const Instruction* pc, StkId
else else
{ {
// slow-path, may invoke Lua calls via __index metamethod // slow-path, may invoke Lua calls via __index metamethod
int slot = LUAU_INSN_C(insn) & h->nodemask8;
L->cachedslot = slot; L->cachedslot = slot;
VM_PROTECT(luaV_gettable(L, rb, kv, ra)); VM_PROTECT(luaV_gettable(L, rb, kv, ra));
// save cachedslot to accelerate future lookups; patches currently executing instruction since pc-2 rolls back two pc++ // save cachedslot to accelerate future lookups; patches currently executing instruction since pc-2 rolls back two pc++

View file

@ -4,6 +4,7 @@
#include "Luau/DenseHash.h" #include "Luau/DenseHash.h"
#include "Luau/IrData.h" #include "Luau/IrData.h"
#include "Luau/IrUtils.h" #include "Luau/IrUtils.h"
#include "Luau/IrVisitUseDef.h"
#include "lobject.h" #include "lobject.h"
@ -186,211 +187,6 @@ void requireVariadicSequence(RegisterSet& sourceRs, const RegisterSet& defRs, ui
} }
} }
template<typename T>
static void visitVmRegDefsUses(T& visitor, IrFunction& function, const IrBlock& block)
{
for (uint32_t instIdx = block.start; instIdx <= block.finish; instIdx++)
{
IrInst& inst = function.instructions[instIdx];
// For correct analysis, all instruction uses must be handled before handling the definitions
switch (inst.cmd)
{
case IrCmd::LOAD_TAG:
case IrCmd::LOAD_POINTER:
case IrCmd::LOAD_DOUBLE:
case IrCmd::LOAD_INT:
case IrCmd::LOAD_TVALUE:
visitor.maybeUse(inst.a); // Argument can also be a VmConst
break;
case IrCmd::STORE_TAG:
case IrCmd::STORE_POINTER:
case IrCmd::STORE_DOUBLE:
case IrCmd::STORE_INT:
case IrCmd::STORE_VECTOR:
case IrCmd::STORE_TVALUE:
case IrCmd::STORE_SPLIT_TVALUE:
visitor.maybeDef(inst.a); // Argument can also be a pointer value
break;
case IrCmd::CMP_ANY:
visitor.use(inst.a);
visitor.use(inst.b);
break;
case IrCmd::JUMP_IF_TRUTHY:
case IrCmd::JUMP_IF_FALSY:
visitor.use(inst.a);
break;
// A <- B, C
case IrCmd::DO_ARITH:
case IrCmd::GET_TABLE:
visitor.use(inst.b);
visitor.maybeUse(inst.c); // Argument can also be a VmConst
visitor.def(inst.a);
break;
case IrCmd::SET_TABLE:
visitor.use(inst.a);
visitor.use(inst.b);
visitor.maybeUse(inst.c); // Argument can also be a VmConst
break;
// A <- B
case IrCmd::DO_LEN:
visitor.use(inst.b);
visitor.def(inst.a);
break;
case IrCmd::GET_IMPORT:
visitor.def(inst.a);
break;
case IrCmd::CONCAT:
visitor.useRange(vmRegOp(inst.a), function.uintOp(inst.b));
visitor.defRange(vmRegOp(inst.a), function.uintOp(inst.b));
break;
case IrCmd::GET_UPVALUE:
visitor.def(inst.a);
break;
case IrCmd::SET_UPVALUE:
visitor.use(inst.b);
break;
case IrCmd::INTERRUPT:
break;
case IrCmd::BARRIER_OBJ:
case IrCmd::BARRIER_TABLE_FORWARD:
visitor.maybeUse(inst.b);
break;
case IrCmd::CLOSE_UPVALS:
// Closing an upvalue should be counted as a register use (it copies the fresh register value)
// But we lack the required information about the specific set of registers that are affected
// Because we don't plan to optimize captured registers atm, we skip full dataflow analysis for them right now
break;
case IrCmd::CAPTURE:
visitor.maybeUse(inst.a);
if (function.uintOp(inst.b) == 1)
visitor.capture(vmRegOp(inst.a));
break;
case IrCmd::SETLIST:
visitor.use(inst.b);
visitor.useRange(vmRegOp(inst.c), function.intOp(inst.d));
break;
case IrCmd::CALL:
visitor.use(inst.a);
visitor.useRange(vmRegOp(inst.a) + 1, function.intOp(inst.b));
visitor.defRange(vmRegOp(inst.a), function.intOp(inst.c));
break;
case IrCmd::RETURN:
visitor.useRange(vmRegOp(inst.a), function.intOp(inst.b));
break;
// TODO: FASTCALL is more restrictive than INVOKE_FASTCALL; we should either determine the exact semantics, or rework it
case IrCmd::FASTCALL:
case IrCmd::INVOKE_FASTCALL:
if (int count = function.intOp(inst.e); count != -1)
{
if (count >= 3)
{
LUAU_ASSERT(inst.d.kind == IrOpKind::VmReg && vmRegOp(inst.d) == vmRegOp(inst.c) + 1);
visitor.useRange(vmRegOp(inst.c), count);
}
else
{
if (count >= 1)
visitor.use(inst.c);
if (count >= 2)
visitor.maybeUse(inst.d); // Argument can also be a VmConst
}
}
else
{
visitor.useVarargs(vmRegOp(inst.c));
}
// Multiple return sequences (count == -1) are defined by ADJUST_STACK_TO_REG
if (int count = function.intOp(inst.f); count != -1)
visitor.defRange(vmRegOp(inst.b), count);
break;
case IrCmd::FORGLOOP:
// First register is not used by instruction, we check that it's still 'nil' with CHECK_TAG
visitor.use(inst.a, 1);
visitor.use(inst.a, 2);
visitor.def(inst.a, 2);
visitor.defRange(vmRegOp(inst.a) + 3, function.intOp(inst.b));
break;
case IrCmd::FORGLOOP_FALLBACK:
visitor.useRange(vmRegOp(inst.a), 3);
visitor.def(inst.a, 2);
visitor.defRange(vmRegOp(inst.a) + 3, uint8_t(function.intOp(inst.b))); // ignore most significant bit
break;
case IrCmd::FORGPREP_XNEXT_FALLBACK:
visitor.use(inst.b);
break;
case IrCmd::FALLBACK_GETGLOBAL:
visitor.def(inst.b);
break;
case IrCmd::FALLBACK_SETGLOBAL:
visitor.use(inst.b);
break;
case IrCmd::FALLBACK_GETTABLEKS:
visitor.use(inst.c);
visitor.def(inst.b);
break;
case IrCmd::FALLBACK_SETTABLEKS:
visitor.use(inst.b);
visitor.use(inst.c);
break;
case IrCmd::FALLBACK_NAMECALL:
visitor.use(inst.c);
visitor.defRange(vmRegOp(inst.b), 2);
break;
case IrCmd::FALLBACK_PREPVARARGS:
// No effect on explicitly referenced registers
break;
case IrCmd::FALLBACK_GETVARARGS:
visitor.defRange(vmRegOp(inst.b), function.intOp(inst.c));
break;
case IrCmd::FALLBACK_DUPCLOSURE:
visitor.def(inst.b);
break;
case IrCmd::FALLBACK_FORGPREP:
visitor.use(inst.b);
visitor.defRange(vmRegOp(inst.b), 3);
break;
case IrCmd::ADJUST_STACK_TO_REG:
visitor.defRange(vmRegOp(inst.a), -1);
break;
case IrCmd::ADJUST_STACK_TO_TOP:
// While this can be considered to be a vararg consumer, it is already handled in fastcall instructions
break;
case IrCmd::GET_TYPEOF:
visitor.use(inst.a);
break;
case IrCmd::FINDUPVAL:
visitor.use(inst.a);
break;
default:
// All instructions which reference registers have to be handled explicitly
LUAU_ASSERT(inst.a.kind != IrOpKind::VmReg);
LUAU_ASSERT(inst.b.kind != IrOpKind::VmReg);
LUAU_ASSERT(inst.c.kind != IrOpKind::VmReg);
LUAU_ASSERT(inst.d.kind != IrOpKind::VmReg);
LUAU_ASSERT(inst.e.kind != IrOpKind::VmReg);
LUAU_ASSERT(inst.f.kind != IrOpKind::VmReg);
break;
}
}
}
struct BlockVmRegLiveInComputation struct BlockVmRegLiveInComputation
{ {
BlockVmRegLiveInComputation(RegisterSet& defRs, std::bitset<256>& capturedRegs) BlockVmRegLiveInComputation(RegisterSet& defRs, std::bitset<256>& capturedRegs)

View file

@ -1431,77 +1431,142 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
break; break;
case IrCmd::BITLSHIFT_UINT: case IrCmd::BITLSHIFT_UINT:
{ {
ScopedRegX64 shiftTmp{regs};
// Custom bit shift value can only be placed in cl // Custom bit shift value can only be placed in cl
ScopedRegX64 shiftTmp{regs, regs.takeReg(ecx, kInvalidInstIdx)}; // but we use it if the shift value is not a constant stored in b
if (inst.b.kind != IrOpKind::Constant)
shiftTmp.take(ecx);
inst.regX64 = regs.allocReg(SizeX64::dword, index); inst.regX64 = regs.allocRegOrReuse(SizeX64::dword, index, {inst.a});
build.mov(shiftTmp.reg, memRegUintOp(inst.b));
if (inst.a.kind != IrOpKind::Inst || inst.regX64 != regOp(inst.a)) if (inst.a.kind != IrOpKind::Inst || inst.regX64 != regOp(inst.a))
build.mov(inst.regX64, memRegUintOp(inst.a)); build.mov(inst.regX64, memRegUintOp(inst.a));
build.shl(inst.regX64, byteReg(shiftTmp.reg)); if (inst.b.kind == IrOpKind::Constant)
{
// if shift value is a constant, we extract the byte-sized shift amount
int8_t shift = int8_t(unsigned(intOp(inst.b)));
build.shl(inst.regX64, shift);
}
else
{
build.mov(shiftTmp.reg, memRegUintOp(inst.b));
build.shl(inst.regX64, byteReg(shiftTmp.reg));
}
break; break;
} }
case IrCmd::BITRSHIFT_UINT: case IrCmd::BITRSHIFT_UINT:
{ {
ScopedRegX64 shiftTmp{regs};
// Custom bit shift value can only be placed in cl // Custom bit shift value can only be placed in cl
ScopedRegX64 shiftTmp{regs, regs.takeReg(ecx, kInvalidInstIdx)}; // but we use it if the shift value is not a constant stored in b
if (inst.b.kind != IrOpKind::Constant)
shiftTmp.take(ecx);
inst.regX64 = regs.allocReg(SizeX64::dword, index); inst.regX64 = regs.allocRegOrReuse(SizeX64::dword, index, {inst.a});
build.mov(shiftTmp.reg, memRegUintOp(inst.b));
if (inst.a.kind != IrOpKind::Inst || inst.regX64 != regOp(inst.a)) if (inst.a.kind != IrOpKind::Inst || inst.regX64 != regOp(inst.a))
build.mov(inst.regX64, memRegUintOp(inst.a)); build.mov(inst.regX64, memRegUintOp(inst.a));
build.shr(inst.regX64, byteReg(shiftTmp.reg)); if (inst.b.kind == IrOpKind::Constant)
{
// if shift value is a constant, we extract the byte-sized shift amount
int8_t shift = int8_t(unsigned(intOp(inst.b)));
build.shr(inst.regX64, shift);
}
else
{
build.mov(shiftTmp.reg, memRegUintOp(inst.b));
build.shr(inst.regX64, byteReg(shiftTmp.reg));
}
break; break;
} }
case IrCmd::BITARSHIFT_UINT: case IrCmd::BITARSHIFT_UINT:
{ {
ScopedRegX64 shiftTmp{regs};
// Custom bit shift value can only be placed in cl // Custom bit shift value can only be placed in cl
ScopedRegX64 shiftTmp{regs, regs.takeReg(ecx, kInvalidInstIdx)}; // but we use it if the shift value is not a constant stored in b
if (inst.b.kind != IrOpKind::Constant)
shiftTmp.take(ecx);
inst.regX64 = regs.allocReg(SizeX64::dword, index); inst.regX64 = regs.allocRegOrReuse(SizeX64::dword, index, {inst.a});
build.mov(shiftTmp.reg, memRegUintOp(inst.b));
if (inst.a.kind != IrOpKind::Inst || inst.regX64 != regOp(inst.a)) if (inst.a.kind != IrOpKind::Inst || inst.regX64 != regOp(inst.a))
build.mov(inst.regX64, memRegUintOp(inst.a)); build.mov(inst.regX64, memRegUintOp(inst.a));
build.sar(inst.regX64, byteReg(shiftTmp.reg)); if (inst.b.kind == IrOpKind::Constant)
{
// if shift value is a constant, we extract the byte-sized shift amount
int8_t shift = int8_t(unsigned(intOp(inst.b)));
build.sar(inst.regX64, shift);
}
else
{
build.mov(shiftTmp.reg, memRegUintOp(inst.b));
build.sar(inst.regX64, byteReg(shiftTmp.reg));
}
break; break;
} }
case IrCmd::BITLROTATE_UINT: case IrCmd::BITLROTATE_UINT:
{ {
ScopedRegX64 shiftTmp{regs};
// Custom bit shift value can only be placed in cl // Custom bit shift value can only be placed in cl
ScopedRegX64 shiftTmp{regs, regs.takeReg(ecx, kInvalidInstIdx)}; // but we use it if the shift value is not a constant stored in b
if (inst.b.kind != IrOpKind::Constant)
shiftTmp.take(ecx);
inst.regX64 = regs.allocReg(SizeX64::dword, index); inst.regX64 = regs.allocRegOrReuse(SizeX64::dword, index, {inst.a});
build.mov(shiftTmp.reg, memRegUintOp(inst.b));
if (inst.a.kind != IrOpKind::Inst || inst.regX64 != regOp(inst.a)) if (inst.a.kind != IrOpKind::Inst || inst.regX64 != regOp(inst.a))
build.mov(inst.regX64, memRegUintOp(inst.a)); build.mov(inst.regX64, memRegUintOp(inst.a));
build.rol(inst.regX64, byteReg(shiftTmp.reg)); if (inst.b.kind == IrOpKind::Constant)
{
// if shift value is a constant, we extract the byte-sized shift amount
int8_t shift = int8_t(unsigned(intOp(inst.b)));
build.rol(inst.regX64, shift);
}
else
{
build.mov(shiftTmp.reg, memRegUintOp(inst.b));
build.rol(inst.regX64, byteReg(shiftTmp.reg));
}
break; break;
} }
case IrCmd::BITRROTATE_UINT: case IrCmd::BITRROTATE_UINT:
{ {
ScopedRegX64 shiftTmp{regs};
// Custom bit shift value can only be placed in cl // Custom bit shift value can only be placed in cl
ScopedRegX64 shiftTmp{regs, regs.takeReg(ecx, kInvalidInstIdx)}; // but we use it if the shift value is not a constant stored in b
if (inst.b.kind != IrOpKind::Constant)
shiftTmp.take(ecx);
inst.regX64 = regs.allocReg(SizeX64::dword, index); inst.regX64 = regs.allocRegOrReuse(SizeX64::dword, index, {inst.a});
build.mov(shiftTmp.reg, memRegUintOp(inst.b));
if (inst.a.kind != IrOpKind::Inst || inst.regX64 != regOp(inst.a)) if (inst.a.kind != IrOpKind::Inst || inst.regX64 != regOp(inst.a))
build.mov(inst.regX64, memRegUintOp(inst.a)); build.mov(inst.regX64, memRegUintOp(inst.a));
build.ror(inst.regX64, byteReg(shiftTmp.reg)); if (inst.b.kind == IrOpKind::Constant)
{
// if shift value is a constant, we extract the byte-sized shift amount
int8_t shift = int8_t(unsigned(intOp(inst.b)));
build.ror(inst.regX64, shift);
}
else
{
build.mov(shiftTmp.reg, memRegUintOp(inst.b));
build.ror(inst.regX64, byteReg(shiftTmp.reg));
}
break; break;
} }
case IrCmd::BITCOUNTLZ_UINT: case IrCmd::BITCOUNTLZ_UINT:

View file

@ -463,6 +463,12 @@ ScopedRegX64::~ScopedRegX64()
owner.freeReg(reg); owner.freeReg(reg);
} }
void ScopedRegX64::take(RegisterX64 reg)
{
LUAU_ASSERT(this->reg == noreg);
this->reg = owner.takeReg(reg, kInvalidInstIdx);
}
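take() exists to support the lazy-claim pattern introduced in the bit-shift lowering above: the scoped register starts out empty, and ecx is pinned only when a non-constant shift amount actually has to live in cl. Condensed from the BITLSHIFT_UINT case in IrLoweringX64.cpp above:

ScopedRegX64 shiftTmp{regs};

// Custom bit shift value can only be placed in cl, but it is needed only when
// the shift amount is not a constant.
if (inst.b.kind != IrOpKind::Constant)
    shiftTmp.take(ecx); // asserts nothing was taken yet, then claims ecx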
void ScopedRegX64::alloc(SizeX64 size) void ScopedRegX64::alloc(SizeX64 size)
{ {
LUAU_ASSERT(reg == noreg); LUAU_ASSERT(reg == noreg);

View file

@ -13,6 +13,7 @@
#include "ltm.h" #include "ltm.h"
LUAU_FASTFLAGVARIABLE(LuauImproveForN, false) LUAU_FASTFLAGVARIABLE(LuauImproveForN, false)
LUAU_FASTFLAG(LuauReduceStackSpills)
namespace Luau namespace Luau
{ {
@ -1384,36 +1385,74 @@ void translateInstNewClosure(IrBuilder& build, const Instruction* pc, int pcpos)
Instruction uinsn = pc[ui + 1]; Instruction uinsn = pc[ui + 1];
LUAU_ASSERT(LUAU_INSN_OP(uinsn) == LOP_CAPTURE); LUAU_ASSERT(LUAU_INSN_OP(uinsn) == LOP_CAPTURE);
IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui)); if (FFlag::LuauReduceStackSpills)
{
switch (LUAU_INSN_A(uinsn))
{
case LCT_VAL:
{
IrOp src = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(LUAU_INSN_B(uinsn)));
IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui));
build.inst(IrCmd::STORE_TVALUE, dst, src);
break;
}
switch (LUAU_INSN_A(uinsn)) case LCT_REF:
{ {
case LCT_VAL: IrOp src = build.inst(IrCmd::FINDUPVAL, build.vmReg(LUAU_INSN_B(uinsn)));
{ IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui));
IrOp src = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(LUAU_INSN_B(uinsn))); build.inst(IrCmd::STORE_POINTER, dst, src);
build.inst(IrCmd::STORE_TVALUE, dst, src); build.inst(IrCmd::STORE_TAG, dst, build.constTag(LUA_TUPVAL));
break; break;
}
case LCT_UPVAL:
{
IrOp src = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, build.undef(), build.vmUpvalue(LUAU_INSN_B(uinsn)));
IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui));
IrOp load = build.inst(IrCmd::LOAD_TVALUE, src);
build.inst(IrCmd::STORE_TVALUE, dst, load);
break;
}
default:
LUAU_ASSERT(!"Unknown upvalue capture type");
LUAU_UNREACHABLE(); // improves switch() codegen by eliding opcode bounds checks
}
} }
else
case LCT_REF:
{ {
IrOp src = build.inst(IrCmd::FINDUPVAL, build.vmReg(LUAU_INSN_B(uinsn))); IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui));
build.inst(IrCmd::STORE_POINTER, dst, src);
build.inst(IrCmd::STORE_TAG, dst, build.constTag(LUA_TUPVAL));
break;
}
case LCT_UPVAL: switch (LUAU_INSN_A(uinsn))
{ {
IrOp src = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, build.undef(), build.vmUpvalue(LUAU_INSN_B(uinsn))); case LCT_VAL:
IrOp load = build.inst(IrCmd::LOAD_TVALUE, src); {
build.inst(IrCmd::STORE_TVALUE, dst, load); IrOp src = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(LUAU_INSN_B(uinsn)));
break; build.inst(IrCmd::STORE_TVALUE, dst, src);
} break;
}
default: case LCT_REF:
LUAU_ASSERT(!"Unknown upvalue capture type"); {
LUAU_UNREACHABLE(); // improves switch() codegen by eliding opcode bounds checks IrOp src = build.inst(IrCmd::FINDUPVAL, build.vmReg(LUAU_INSN_B(uinsn)));
build.inst(IrCmd::STORE_POINTER, dst, src);
build.inst(IrCmd::STORE_TAG, dst, build.constTag(LUA_TUPVAL));
break;
}
case LCT_UPVAL:
{
IrOp src = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, build.undef(), build.vmUpvalue(LUAU_INSN_B(uinsn)));
IrOp load = build.inst(IrCmd::LOAD_TVALUE, src);
build.inst(IrCmd::STORE_TVALUE, dst, load);
break;
}
default:
LUAU_ASSERT(!"Unknown upvalue capture type");
LUAU_UNREACHABLE(); // improves switch() codegen by eliding opcode bounds checks
}
} }
} }

View file

@ -1,6 +1,10 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details // This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "IrValueLocationTracking.h" #include "IrValueLocationTracking.h"
#include "Luau/IrUtils.h"
LUAU_FASTFLAGVARIABLE(LuauReduceStackSpills, false)
namespace Luau namespace Luau
{ {
namespace CodeGen namespace CodeGen
@ -23,13 +27,16 @@ void IrValueLocationTracking::beforeInstLowering(IrInst& inst)
switch (inst.cmd) switch (inst.cmd)
{ {
case IrCmd::STORE_TAG: case IrCmd::STORE_TAG:
// Tag update is a bit tricky, restore operations of values are not affected
invalidateRestoreOp(inst.a, /*skipValueInvalidation*/ true);
break;
case IrCmd::STORE_POINTER: case IrCmd::STORE_POINTER:
case IrCmd::STORE_DOUBLE: case IrCmd::STORE_DOUBLE:
case IrCmd::STORE_INT: case IrCmd::STORE_INT:
case IrCmd::STORE_VECTOR: case IrCmd::STORE_VECTOR:
case IrCmd::STORE_TVALUE: case IrCmd::STORE_TVALUE:
case IrCmd::STORE_SPLIT_TVALUE: case IrCmd::STORE_SPLIT_TVALUE:
invalidateRestoreOp(inst.a); invalidateRestoreOp(inst.a, /*skipValueInvalidation*/ false);
break; break;
case IrCmd::ADJUST_STACK_TO_REG: case IrCmd::ADJUST_STACK_TO_REG:
invalidateRestoreVmRegs(vmRegOp(inst.a), -1); invalidateRestoreVmRegs(vmRegOp(inst.a), -1);
@ -46,13 +53,13 @@ void IrValueLocationTracking::beforeInstLowering(IrInst& inst)
case IrCmd::DO_LEN: case IrCmd::DO_LEN:
case IrCmd::GET_TABLE: case IrCmd::GET_TABLE:
case IrCmd::GET_IMPORT: case IrCmd::GET_IMPORT:
invalidateRestoreOp(inst.a); invalidateRestoreOp(inst.a, /*skipValueInvalidation*/ false);
break; break;
case IrCmd::CONCAT: case IrCmd::CONCAT:
invalidateRestoreVmRegs(vmRegOp(inst.a), function.uintOp(inst.b)); invalidateRestoreVmRegs(vmRegOp(inst.a), function.uintOp(inst.b));
break; break;
case IrCmd::GET_UPVALUE: case IrCmd::GET_UPVALUE:
invalidateRestoreOp(inst.a); invalidateRestoreOp(inst.a, /*skipValueInvalidation*/ false);
break; break;
case IrCmd::CALL: case IrCmd::CALL:
// Even if result count is limited, all registers starting from function (ra) might be modified // Even if result count is limited, all registers starting from function (ra) might be modified
@ -65,7 +72,7 @@ void IrValueLocationTracking::beforeInstLowering(IrInst& inst)
break; break;
case IrCmd::FALLBACK_GETGLOBAL: case IrCmd::FALLBACK_GETGLOBAL:
case IrCmd::FALLBACK_GETTABLEKS: case IrCmd::FALLBACK_GETTABLEKS:
invalidateRestoreOp(inst.b); invalidateRestoreOp(inst.b, /*skipValueInvalidation*/ false);
break; break;
case IrCmd::FALLBACK_NAMECALL: case IrCmd::FALLBACK_NAMECALL:
invalidateRestoreVmRegs(vmRegOp(inst.b), 2); invalidateRestoreVmRegs(vmRegOp(inst.b), 2);
@ -74,7 +81,7 @@ void IrValueLocationTracking::beforeInstLowering(IrInst& inst)
invalidateRestoreVmRegs(vmRegOp(inst.b), function.intOp(inst.c)); invalidateRestoreVmRegs(vmRegOp(inst.b), function.intOp(inst.c));
break; break;
case IrCmd::FALLBACK_DUPCLOSURE: case IrCmd::FALLBACK_DUPCLOSURE:
invalidateRestoreOp(inst.b); invalidateRestoreOp(inst.b, /*skipValueInvalidation*/ false);
break; break;
case IrCmd::FALLBACK_FORGPREP: case IrCmd::FALLBACK_FORGPREP:
invalidateRestoreVmRegs(vmRegOp(inst.b), 3); invalidateRestoreVmRegs(vmRegOp(inst.b), 3);
@ -180,7 +187,7 @@ void IrValueLocationTracking::recordRestoreOp(uint32_t instIdx, IrOp location)
} }
} }
void IrValueLocationTracking::invalidateRestoreOp(IrOp location) void IrValueLocationTracking::invalidateRestoreOp(IrOp location, bool skipValueInvalidation)
{ {
if (location.kind == IrOpKind::VmReg) if (location.kind == IrOpKind::VmReg)
{ {
@ -190,6 +197,20 @@ void IrValueLocationTracking::invalidateRestoreOp(IrOp location)
{ {
IrInst& inst = function.instructions[instIdx]; IrInst& inst = function.instructions[instIdx];
// If we are only modifying the tag, we can avoid invalidating tracked location of values
if (FFlag::LuauReduceStackSpills && skipValueInvalidation)
{
switch (getCmdValueKind(inst.cmd))
{
case IrValueKind::Double:
case IrValueKind::Pointer:
case IrValueKind::Int:
return;
default:
break;
}
}
// If instruction value is spilled and memory location is about to be lost, it has to be restored immediately // If instruction value is spilled and memory location is about to be lost, it has to be restored immediately
if (inst.needsReload) if (inst.needsReload)
restoreCallback(restoreCallbackCtx, inst); restoreCallback(restoreCallbackCtx, inst);
@ -215,7 +236,7 @@ void IrValueLocationTracking::invalidateRestoreVmRegs(int start, int count)
end = maxReg; end = maxReg;
for (int reg = start; reg <= end; reg++) for (int reg = start; reg <= end; reg++)
invalidateRestoreOp(IrOp{IrOpKind::VmReg, uint8_t(reg)}); invalidateRestoreOp(IrOp{IrOpKind::VmReg, uint8_t(reg)}, /*skipValueInvalidation*/ false);
} }
} // namespace CodeGen } // namespace CodeGen
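The skipValueInvalidation path above rests on one observation: a tag-only STORE_TAG cannot clobber a numeric or pointer payload, so the tracked restore location of such a value stays usable. A standalone restatement of the check that the code performs inline via getCmdValueKind(inst.cmd) (illustrative helper, not part of the file):

// True when a tag-only store leaves the instruction's tracked payload intact.
bool payloadSurvivesTagOnlyStore(IrValueKind kind)
{
    switch (kind)
    {
    case IrValueKind::Double:
    case IrValueKind::Pointer:
    case IrValueKind::Int:
        return true; // payload is untouched by a tag-only write
    default:
        return false; // e.g. a tracked full TValue still has to be invalidated
    }
}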

View file

@ -20,7 +20,7 @@ struct IrValueLocationTracking
void afterInstLowering(IrInst& inst, uint32_t instIdx); void afterInstLowering(IrInst& inst, uint32_t instIdx);
void recordRestoreOp(uint32_t instIdx, IrOp location); void recordRestoreOp(uint32_t instIdx, IrOp location);
void invalidateRestoreOp(IrOp location); void invalidateRestoreOp(IrOp location, bool skipValueInvalidation);
void invalidateRestoreVmRegs(int start, int count); void invalidateRestoreVmRegs(int start, int count);
IrFunction& function; IrFunction& function;

View file

@ -82,6 +82,7 @@ target_sources(Luau.CodeGen PRIVATE
CodeGen/include/Luau/IrData.h CodeGen/include/Luau/IrData.h
CodeGen/include/Luau/IrRegAllocX64.h CodeGen/include/Luau/IrRegAllocX64.h
CodeGen/include/Luau/IrUtils.h CodeGen/include/Luau/IrUtils.h
CodeGen/include/Luau/IrVisitUseDef.h
CodeGen/include/Luau/Label.h CodeGen/include/Luau/Label.h
CodeGen/include/Luau/OperandX64.h CodeGen/include/Luau/OperandX64.h
CodeGen/include/Luau/OptimizeConstProp.h CodeGen/include/Luau/OptimizeConstProp.h

View file

@ -17,8 +17,6 @@
#include <string.h> #include <string.h>
LUAU_FASTFLAGVARIABLE(LuauPCallDebuggerFix, false)
/* /*
** {====================================================== ** {======================================================
** Error-recovery functions ** Error-recovery functions
@ -584,7 +582,7 @@ int luaD_pcall(lua_State* L, Pfunc func, void* u, ptrdiff_t old_top, ptrdiff_t e
L->nCcalls = oldnCcalls; L->nCcalls = oldnCcalls;
// an error occurred, check if we have a protected error callback // an error occurred, check if we have a protected error callback
if ((!FFlag::LuauPCallDebuggerFix || yieldable) && L->global->cb.debugprotectederror) if (yieldable && L->global->cb.debugprotectederror)
{ {
L->global->cb.debugprotectederror(L); L->global->cb.debugprotectederror(L);

View file

@ -25,12 +25,18 @@ static time_t timegm(struct tm* timep)
#elif defined(__FreeBSD__) #elif defined(__FreeBSD__)
static tm* gmtime_r(const time_t* timep, tm* result) static tm* gmtime_r(const time_t* timep, tm* result)
{ {
return gmtime_s(timep, result) == 0 ? result : NULL; // Note: return is reversed from Windows (0 is success on Windows, but 0/null is failure elsewhere)
// Windows https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/gmtime-s-gmtime32-s-gmtime64-s?view=msvc-170#return-value
// Everyone else https://en.cppreference.com/w/c/chrono/gmtime
return gmtime_s(timep, result);
} }
static tm* localtime_r(const time_t* timep, tm* result) static tm* localtime_r(const time_t* timep, tm* result)
{ {
return localtime_s(timep, result) == 0 ? result : NULL; // Note: return is reversed from Windows (0 is success on Windows, but 0/null is failure elsewhere)
// Windows https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-s-localtime32-s-localtime64-s?view=msvc-170#return-value
// Everyone else https://en.cppreference.com/w/c/chrono/localtime
return localtime_s(timep, result);
} }
static time_t timegm(struct tm* timep) static time_t timegm(struct tm* timep)

View file

@ -297,7 +297,6 @@ TEST_CASE_FIXTURE(Fixture, "include_types_ancestry")
TEST_CASE_FIXTURE(Fixture, "find_name_ancestry") TEST_CASE_FIXTURE(Fixture, "find_name_ancestry")
{ {
ScopedFastFlag sff{"FixFindBindingAtFunctionName", true};
check(R"( check(R"(
local tbl = {} local tbl = {}
function tbl:abc() end function tbl:abc() end
@ -312,7 +311,6 @@ TEST_CASE_FIXTURE(Fixture, "find_name_ancestry")
TEST_CASE_FIXTURE(Fixture, "find_expr_ancestry") TEST_CASE_FIXTURE(Fixture, "find_expr_ancestry")
{ {
ScopedFastFlag sff{"FixFindBindingAtFunctionName", true};
check(R"( check(R"(
local tbl = {} local tbl = {}
function tbl:abc() end function tbl:abc() end

View file

@ -23,7 +23,6 @@ extern bool verbose;
extern bool codegen; extern bool codegen;
extern int optimizationLevel; extern int optimizationLevel;
LUAU_FASTFLAG(LuauPCallDebuggerFix);
LUAU_FASTFLAG(LuauFloorDivision); LUAU_FASTFLAG(LuauFloorDivision);
static lua_CompileOptions defaultOptions() static lua_CompileOptions defaultOptions()
@ -146,12 +145,19 @@ using StateRef = std::unique_ptr<lua_State, void (*)(lua_State*)>;
static StateRef runConformance(const char* name, void (*setup)(lua_State* L) = nullptr, void (*yield)(lua_State* L) = nullptr, static StateRef runConformance(const char* name, void (*setup)(lua_State* L) = nullptr, void (*yield)(lua_State* L) = nullptr,
lua_State* initialLuaState = nullptr, lua_CompileOptions* options = nullptr, bool skipCodegen = false) lua_State* initialLuaState = nullptr, lua_CompileOptions* options = nullptr, bool skipCodegen = false)
{ {
#ifdef LUAU_CONFORMANCE_SOURCE_DIR
std::string path = LUAU_CONFORMANCE_SOURCE_DIR;
path += "/";
path += name;
#else
std::string path = __FILE__; std::string path = __FILE__;
path.erase(path.find_last_of("\\/")); path.erase(path.find_last_of("\\/"));
path += "/conformance/"; path += "/conformance/";
path += name; path += name;
#endif
std::fstream stream(path, std::ios::in | std::ios::binary); std::fstream stream(path, std::ios::in | std::ios::binary);
INFO(path);
REQUIRE(stream); REQUIRE(stream);
std::string source(std::istreambuf_iterator<char>(stream), {}); std::string source(std::istreambuf_iterator<char>(stream), {});
@ -1243,30 +1249,7 @@ TEST_CASE("TagMethodError")
// when doLuaBreak is true the test additionally calls lua_break to ensure breaking the debugger doesn't cause the VM to crash // when doLuaBreak is true the test additionally calls lua_break to ensure breaking the debugger doesn't cause the VM to crash
for (bool doLuaBreak : {false, true}) for (bool doLuaBreak : {false, true})
{ {
std::optional<ScopedFastFlag> sff; expectedHits = {22, 32};
if (doLuaBreak)
{
// If doLuaBreak is true then LuauPCallDebuggerFix must be enabled to avoid crashing the tests.
sff = {"LuauPCallDebuggerFix", true};
}
if (FFlag::LuauPCallDebuggerFix)
{
expectedHits = {22, 32};
}
else
{
expectedHits = {
9,
17,
17,
22,
27,
27,
32,
37,
};
}
static int index; static int index;
static bool luaBreak; static bool luaBreak;

View file

@ -12,7 +12,6 @@ ConstraintGraphBuilderFixture::ConstraintGraphBuilderFixture()
mainModule->name = "MainModule"; mainModule->name = "MainModule";
mainModule->humanReadableName = "MainModule"; mainModule->humanReadableName = "MainModule";
BlockedType::DEPRECATED_nextIndex = 0;
BlockedTypePack::nextIndex = 0; BlockedTypePack::nextIndex = 0;
} }

View file

@ -4,6 +4,8 @@
#include "Fixture.h" #include "Fixture.h"
#include "doctest.h" #include "doctest.h"
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution);
using namespace Luau; using namespace Luau;
static TypeId requireBinding(Scope* scope, const char* name) static TypeId requireBinding(Scope* scope, const char* name)
@ -57,7 +59,11 @@ TEST_CASE_FIXTURE(ConstraintGraphBuilderFixture, "proper_let_generalization")
TypeId idType = requireBinding(rootScope, "b"); TypeId idType = requireBinding(rootScope, "b");
ToStringOptions opts; ToStringOptions opts;
CHECK("<a>(a) -> number" == toString(idType, opts));
if (FFlag::DebugLuauDeferredConstraintResolution)
CHECK("(unknown) -> number" == toString(idType, opts));
else
CHECK("<a>(a) -> number" == toString(idType, opts));
} }
TEST_SUITE_END(); TEST_SUITE_END();

View file

@ -876,10 +876,6 @@ TEST_CASE_FIXTURE(NormalizeFixture, "negations_of_tables")
TEST_CASE_FIXTURE(NormalizeFixture, "normalize_blocked_types") TEST_CASE_FIXTURE(NormalizeFixture, "normalize_blocked_types")
{ {
ScopedFastFlag sff[]{
{"LuauNormalizeBlockedTypes", true},
};
Type blocked{BlockedType{}}; Type blocked{BlockedType{}};
const NormalizedType* norm = normalizer.normalize(&blocked); const NormalizedType* norm = normalizer.normalize(&blocked);

View file

@ -63,6 +63,11 @@ struct SubtypeFixture : Fixture
return arena.addType(TableType{std::move(props), std::nullopt, {}, TableState::Sealed}); return arena.addType(TableType{std::move(props), std::nullopt, {}, TableState::Sealed});
} }
TypeId idx(TypeId keyTy, TypeId valueTy)
{
return arena.addType(TableType{{}, TableIndexer{keyTy, valueTy}, {}, TableState::Sealed});
}
// `&` // `&`
TypeId meet(TypeId a, TypeId b) TypeId meet(TypeId a, TypeId b)
{ {
@ -99,6 +104,11 @@ struct SubtypeFixture : Fixture
return ty; return ty;
} }
TypeId opt(TypeId ty)
{
return join(ty, builtinTypes->nilType);
}
TypeId cyclicTable(std::function<void(TypeId, TableType*)>&& cb) TypeId cyclicTable(std::function<void(TypeId, TableType*)>&& cb)
{ {
TypeId res = arena.addType(GenericType{}); TypeId res = arena.addType(GenericType{});
@ -531,6 +541,23 @@ TEST_CASE_FIXTURE(SubtypeFixture, "(number, string) -> string <!: (number, ...st
CHECK_IS_NOT_SUBTYPE(numberAndStringToStringType, numberAndStringsToStringType); CHECK_IS_NOT_SUBTYPE(numberAndStringToStringType, numberAndStringsToStringType);
} }
/*
* <A>(A) -> A <: <X>(X) -> X
* A can be bound to X.
*
* <A>(A) -> A </: <X>(X) -> number
* A can be bound to X, but A </: number
*
* (number) -> number </: <A>(A) -> A
* Only generics on the left side can be bound.
* number </: A
*
* <A, B>(A, B) -> boolean <: <X>(X, X) -> boolean
* It is ok to bind both A and B to X.
*
* <A>(A, A) -> boolean </: <X, Y>(X, Y) -> boolean
* A cannot be bound to both X and Y.
*/
TEST_CASE_FIXTURE(SubtypeFixture, "<T>() -> T <: () -> number") TEST_CASE_FIXTURE(SubtypeFixture, "<T>() -> T <: () -> number")
{ {
CHECK_IS_SUBTYPE(genericNothingToTType, nothingToNumberType); CHECK_IS_SUBTYPE(genericNothingToTType, nothingToNumberType);
@ -636,7 +663,6 @@ TEST_CASE_FIXTURE(SubtypeFixture, "() -> () <!: <A...>(A...) -> A...")
CHECK_IS_NOT_SUBTYPE(nothingToNothingType, genericAsToAsType); CHECK_IS_NOT_SUBTYPE(nothingToNothingType, genericAsToAsType);
} }
TEST_CASE_FIXTURE(SubtypeFixture, "{} <: {}") TEST_CASE_FIXTURE(SubtypeFixture, "{} <: {}")
{ {
CHECK_IS_SUBTYPE(tbl({}), tbl({})); CHECK_IS_SUBTYPE(tbl({}), tbl({}));
@ -647,6 +673,11 @@ TEST_CASE_FIXTURE(SubtypeFixture, "{x: number} <: {}")
CHECK_IS_SUBTYPE(tbl({{"x", builtinTypes->numberType}}), tbl({})); CHECK_IS_SUBTYPE(tbl({{"x", builtinTypes->numberType}}), tbl({}));
} }
TEST_CASE_FIXTURE(SubtypeFixture, "{} <!: {x: number}")
{
CHECK_IS_NOT_SUBTYPE(tbl({}), tbl({{"x", builtinTypes->numberType}}));
}
TEST_CASE_FIXTURE(SubtypeFixture, "{x: number} <!: {x: string}") TEST_CASE_FIXTURE(SubtypeFixture, "{x: number} <!: {x: string}")
{ {
CHECK_IS_NOT_SUBTYPE(tbl({{"x", builtinTypes->numberType}}), tbl({{"x", builtinTypes->stringType}})); CHECK_IS_NOT_SUBTYPE(tbl({{"x", builtinTypes->numberType}}), tbl({{"x", builtinTypes->stringType}}));
@ -972,22 +1003,25 @@ TEST_CASE_FIXTURE(SubtypeFixture, "unknown <: X")
CHECK_MESSAGE(usingGrandChildScope.isSubtype, "Expected " << builtinTypes->unknownType << " <: " << genericX); CHECK_MESSAGE(usingGrandChildScope.isSubtype, "Expected " << builtinTypes->unknownType << " <: " << genericX);
} }
/* TEST_IS_SUBTYPE(idx(builtinTypes->numberType, builtinTypes->numberType), tbl({}));
* <A>(A) -> A <: <X>(X) -> X TEST_IS_NOT_SUBTYPE(tbl({}), idx(builtinTypes->numberType, builtinTypes->numberType));
* A can be bound to X.
* TEST_IS_NOT_SUBTYPE(tbl({{"X", builtinTypes->numberType}}), idx(builtinTypes->numberType, builtinTypes->numberType));
* <A>(A) -> A </: <X>(X) -> number TEST_IS_NOT_SUBTYPE(idx(builtinTypes->numberType, builtinTypes->numberType), tbl({{"X", builtinTypes->numberType}}));
* A can be bound to X, but A </: number
* TEST_IS_NOT_SUBTYPE(idx(join(builtinTypes->numberType, builtinTypes->stringType), builtinTypes->numberType), idx(builtinTypes->numberType, builtinTypes->numberType));
* (number) -> number </: <A>(A) -> A TEST_IS_NOT_SUBTYPE(idx(builtinTypes->numberType, builtinTypes->numberType), idx(join(builtinTypes->numberType, builtinTypes->stringType), builtinTypes->numberType));
* Only generics on the left side can be bound.
* number </: A TEST_IS_NOT_SUBTYPE(idx(builtinTypes->numberType, join(builtinTypes->stringType, builtinTypes->numberType)), idx(builtinTypes->numberType, builtinTypes->numberType));
* TEST_IS_NOT_SUBTYPE(idx(builtinTypes->numberType, builtinTypes->numberType), idx(builtinTypes->numberType, join(builtinTypes->stringType, builtinTypes->numberType)));
* <A, B>(A, B) -> boolean <: <X>(X, X) -> boolean
* It is ok to bind both A and B to X. TEST_IS_NOT_SUBTYPE(tbl({{"X", builtinTypes->numberType}}), idx(builtinTypes->stringType, builtinTypes->numberType));
* TEST_IS_SUBTYPE(idx(builtinTypes->stringType, builtinTypes->numberType), tbl({{"X", builtinTypes->numberType}}));
* <A>(A, A) -> boolean </: <X, Y>(X, Y) -> boolean
* A cannot be bound to both X and Y. TEST_IS_NOT_SUBTYPE(tbl({{"X", opt(builtinTypes->numberType)}}), idx(builtinTypes->stringType, builtinTypes->numberType));
*/ TEST_IS_NOT_SUBTYPE(idx(builtinTypes->stringType, builtinTypes->numberType), tbl({{"X", opt(builtinTypes->numberType)}}));
TEST_IS_SUBTYPE(tbl({{"X", builtinTypes->numberType}, {"Y", builtinTypes->numberType}}), tbl({{"X", builtinTypes->numberType}}));
TEST_IS_NOT_SUBTYPE(tbl({{"X", builtinTypes->numberType}}), tbl({{"X", builtinTypes->numberType}, {"Y", builtinTypes->numberType}}));
TEST_SUITE_END(); TEST_SUITE_END();
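With the new idx() and opt() helpers, further indexer cases can be written in the same style. Two hypothetical additions (not part of the suite) that exercise opt() together with idx(); consistent with the cases above, indexer value types behave invariantly, so widening with opt() breaks subtyping in both directions:

TEST_IS_NOT_SUBTYPE(idx(builtinTypes->stringType, builtinTypes->numberType), idx(builtinTypes->stringType, opt(builtinTypes->numberType)));
TEST_IS_NOT_SUBTYPE(idx(builtinTypes->stringType, opt(builtinTypes->numberType)), idx(builtinTypes->stringType, builtinTypes->numberType));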

View file

@ -789,7 +789,10 @@ TEST_CASE_FIXTURE(Fixture, "toStringNamedFunction_include_self_param")
auto ttv = get<TableType>(follow(parentTy)); auto ttv = get<TableType>(follow(parentTy));
auto ftv = get<FunctionType>(follow(ttv->props.at("method").type())); auto ftv = get<FunctionType>(follow(ttv->props.at("method").type()));
CHECK_EQ("foo:method<a>(self: a, arg: string): ()", toStringNamedFunction("foo:method", *ftv)); if (FFlag::DebugLuauDeferredConstraintResolution)
CHECK_EQ("foo:method(self: unknown, arg: string): ()", toStringNamedFunction("foo:method", *ftv));
else
CHECK_EQ("foo:method<a>(self: a, arg: string): ()", toStringNamedFunction("foo:method", *ftv));
} }
TEST_CASE_FIXTURE(Fixture, "toStringNamedFunction_hide_self_param") TEST_CASE_FIXTURE(Fixture, "toStringNamedFunction_hide_self_param")
@ -814,7 +817,10 @@ TEST_CASE_FIXTURE(Fixture, "toStringNamedFunction_hide_self_param")
auto ftv = get<FunctionType>(methodTy); auto ftv = get<FunctionType>(methodTy);
REQUIRE_MESSAGE(ftv, "Expected a function but got " << toString(methodTy, opts)); REQUIRE_MESSAGE(ftv, "Expected a function but got " << toString(methodTy, opts));
CHECK_EQ("foo:method<a>(arg: string): ()", toStringNamedFunction("foo:method", *ftv, opts)); if (FFlag::DebugLuauDeferredConstraintResolution)
CHECK_EQ("foo:method(arg: string): ()", toStringNamedFunction("foo:method", *ftv, opts));
else
CHECK_EQ("foo:method<a>(arg: string): ()", toStringNamedFunction("foo:method", *ftv, opts));
} }
TEST_CASE_FIXTURE(Fixture, "tostring_unsee_ttv_if_array") TEST_CASE_FIXTURE(Fixture, "tostring_unsee_ttv_if_array")

View file

@ -1,6 +1,7 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details // This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/TypeFamily.h" #include "Luau/TypeFamily.h"
#include "Luau/ConstraintSolver.h"
#include "Luau/TxnLog.h" #include "Luau/TxnLog.h"
#include "Luau/Type.h" #include "Luau/Type.h"
@ -22,7 +23,7 @@ struct FamilyFixture : Fixture
swapFamily = TypeFamily{/* name */ "Swap", swapFamily = TypeFamily{/* name */ "Swap",
/* reducer */ /* reducer */
[](std::vector<TypeId> tys, std::vector<TypePackId> tps, NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins, [](std::vector<TypeId> tys, std::vector<TypePackId> tps, NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins,
NotNull<const TxnLog> log, NotNull<Scope> scope, NotNull<Normalizer> normalizer) -> TypeFamilyReductionResult<TypeId> { NotNull<const TxnLog> log, NotNull<Scope> scope, NotNull<Normalizer> normalizer, ConstraintSolver* solver) -> TypeFamilyReductionResult<TypeId> {
LUAU_ASSERT(tys.size() == 1); LUAU_ASSERT(tys.size() == 1);
TypeId param = log->follow(tys.at(0)); TypeId param = log->follow(tys.at(0));
@ -34,8 +35,8 @@ struct FamilyFixture : Fixture
{ {
return TypeFamilyReductionResult<TypeId>{builtins->stringType, false, {}, {}}; return TypeFamilyReductionResult<TypeId>{builtins->stringType, false, {}, {}};
} }
else if (log->get<BlockedType>(param) || log->get<FreeType>(param) || log->get<PendingExpansionType>(param) || else if (log->get<BlockedType>(param) || log->get<PendingExpansionType>(param) ||
log->get<TypeFamilyInstanceType>(param)) log->get<TypeFamilyInstanceType>(param) || (solver && solver->hasUnresolvedConstraints(param)))
{ {
return TypeFamilyReductionResult<TypeId>{std::nullopt, false, {param}, {}}; return TypeFamilyReductionResult<TypeId>{std::nullopt, false, {param}, {}};
} }
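For context on the new `solver` parameter threaded through the reducer above: returning `TypeFamilyReductionResult<TypeId>{std::nullopt, false, {param}, {}}` means "not reducible yet", and `solver->hasUnresolvedConstraints(param)` lets the reducer keep deferring while constraints that could still change `param` are outstanding. A hedged sketch of how the fixture's Swap family surfaces in checked Luau code (the exact reduction and test shape are assumed, not taken from this diff):

-- A family instance written in source stays an unreduced Swap<...> while its
-- operand is still blocked or free or has outstanding constraints; only then
-- does the reducer above run.
type Swapped = Swap<number>
local s: Swapped = "hello" -- fine if Swap<number> reduces to string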
@ -233,7 +234,8 @@ TEST_CASE_FIXTURE(Fixture, "internal_families_raise_errors")
TEST_CASE_FIXTURE(BuiltinsFixture, "type_families_inhabited_with_normalization") TEST_CASE_FIXTURE(BuiltinsFixture, "type_families_inhabited_with_normalization")
{ {
ScopedFastFlag sff{"DebugLuauDeferredConstraintResolution", true}; if (!FFlag::DebugLuauDeferredConstraintResolution)
return;
CheckResult result = check(R"( CheckResult result = check(R"(
local useGridConfig : any local useGridConfig : any

View file

@ -1691,6 +1691,10 @@ TEST_CASE_FIXTURE(Fixture, "occurs_check_failure_in_function_return_type")
TEST_CASE_FIXTURE(Fixture, "free_is_not_bound_to_unknown") TEST_CASE_FIXTURE(Fixture, "free_is_not_bound_to_unknown")
{ {
// This test only makes sense for the old solver
if (FFlag::DebugLuauDeferredConstraintResolution)
return;
CheckResult result = check(R"( CheckResult result = check(R"(
local function foo(f: (unknown) -> (), x) local function foo(f: (unknown) -> (), x)
f(x) f(x)
@ -2095,6 +2099,10 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "attempt_to_call_an_intersection_of_tables_wi
TEST_CASE_FIXTURE(Fixture, "generic_packs_are_not_variadic") TEST_CASE_FIXTURE(Fixture, "generic_packs_are_not_variadic")
{ {
// This test is blocking CI until subtyping is complete.
if (!FFlag::DebugLuauDeferredConstraintResolution)
return;
ScopedFastFlag sff{"DebugLuauDeferredConstraintResolution", true}; ScopedFastFlag sff{"DebugLuauDeferredConstraintResolution", true};
CheckResult result = check(R"( CheckResult result = check(R"(

View file

@ -1192,6 +1192,20 @@ end)
LUAU_REQUIRE_NO_ERRORS(result); LUAU_REQUIRE_NO_ERRORS(result);
} }
TEST_CASE_FIXTURE(Fixture, "apply_type_function_nested_generics3")
{
// This minimization was useful for debugging a particular issue with
// cyclic types under local type inference.
CheckResult result = check(R"(
local getReturnValue: <V>(cb: () -> V) -> V = nil :: any
local y = getReturnValue(function() return nil :: any end)
)");
LUAU_REQUIRE_NO_ERRORS(result);
}
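A brief note on why this minimization touches cyclic types: `V` is inferred from the callback's return type and also flows into the type of `y`, so a solver that substitutes eagerly can end up with `V`'s bounds referring back to `V`. The sketch below only restates the test with that reading spelled out in comments (editorial, not part of the diff):

local getReturnValue: <V>(cb: () -> V) -> V = nil :: any

-- V is constrained by the callback's return type (any) and, in turn,
-- determines the type of y; under local type inference both directions are
-- solved via constraints, which is presumably where the cyclic-type issue
-- surfaced.
local y = getReturnValue(function()
    return nil :: any
end)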
TEST_CASE_FIXTURE(Fixture, "quantify_functions_even_if_they_have_an_explicit_generic") TEST_CASE_FIXTURE(Fixture, "quantify_functions_even_if_they_have_an_explicit_generic")
{ {
CheckResult result = check(R"( CheckResult result = check(R"(

View file

@ -34,6 +34,10 @@ TEST_CASE_FIXTURE(Fixture, "for_loop")
TEST_CASE_FIXTURE(BuiltinsFixture, "iteration_no_table_passed") TEST_CASE_FIXTURE(BuiltinsFixture, "iteration_no_table_passed")
{ {
// This test may block CI if forced to run outside of DCR.
if (!FFlag::DebugLuauDeferredConstraintResolution)
return;
ScopedFastFlag sff{"DebugLuauDeferredConstraintResolution", true}; ScopedFastFlag sff{"DebugLuauDeferredConstraintResolution", true};
CheckResult result = check(R"( CheckResult result = check(R"(
@ -58,7 +62,9 @@ for a, b in t do end
TEST_CASE_FIXTURE(BuiltinsFixture, "iteration_regression_issue_69967") TEST_CASE_FIXTURE(BuiltinsFixture, "iteration_regression_issue_69967")
{ {
ScopedFastFlag sff{"DebugLuauDeferredConstraintResolution", true}; if (!FFlag::DebugLuauDeferredConstraintResolution)
return;
CheckResult result = check(R"( CheckResult result = check(R"(
type Iterable = typeof(setmetatable( type Iterable = typeof(setmetatable(
{}, {},

View file

@ -14,6 +14,8 @@
using namespace Luau; using namespace Luau;
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution);
TEST_SUITE_BEGIN("TypeInferOOP"); TEST_SUITE_BEGIN("TypeInferOOP");
TEST_CASE_FIXTURE(Fixture, "dont_suggest_using_colon_rather_than_dot_if_not_defined_with_colon") TEST_CASE_FIXTURE(Fixture, "dont_suggest_using_colon_rather_than_dot_if_not_defined_with_colon")
@ -337,7 +339,10 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "augmenting_an_unsealed_table_with_a_metatabl
end end
)"); )");
CHECK("{ @metatable { number: number }, { method: <a>(a) -> string } }" == toString(requireType("B"), {true})); if (FFlag::DebugLuauDeferredConstraintResolution)
CHECK("{ @metatable { number: number }, { method: (unknown) -> string } }" == toString(requireType("B"), {true}));
else
CHECK("{ @metatable { number: number }, { method: <a>(a) -> string } }" == toString(requireType("B"), {true}));
} }
TEST_CASE_FIXTURE(BuiltinsFixture, "react_style_oo") TEST_CASE_FIXTURE(BuiltinsFixture, "react_style_oo")
@ -405,8 +410,11 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "cycle_between_object_constructor_and_alias")
CHECK_MESSAGE(get<MetatableType>(follow(aliasType)), "Expected metatable type but got: " << toString(aliasType)); CHECK_MESSAGE(get<MetatableType>(follow(aliasType)), "Expected metatable type but got: " << toString(aliasType));
} }
TEST_CASE_FIXTURE(BuiltinsFixture, "promise_type_error_too_complex") TEST_CASE_FIXTURE(BuiltinsFixture, "promise_type_error_too_complex" * doctest::timeout(0.5))
{ {
    // TODO: LTI changes to function call resolution have rendered this test impossibly slow.
    // Shared self should fix it, but there may be other mitigations possible as well.

REQUIRE(!FFlag::DebugLuauDeferredConstraintResolution);
ScopedFastFlag sff{"LuauStacklessTypeClone2", true}; ScopedFastFlag sff{"LuauStacklessTypeClone2", true};
frontend.options.retainFullTypeGraphs = false; frontend.options.retainFullTypeGraphs = false;

View file

@ -480,7 +480,11 @@ TEST_CASE_FIXTURE(Fixture, "dcr_can_partially_dispatch_a_constraint")
// would append a new constraint number <: *blocked* to the constraint set // would append a new constraint number <: *blocked* to the constraint set
// to be solved later. This should be faster and theoretically less prone // to be solved later. This should be faster and theoretically less prone
// to cyclic constraint dependencies. // to cyclic constraint dependencies.
CHECK("<a>(a, number) -> ()" == toString(requireType("prime_iter")));
if (FFlag::DebugLuauDeferredConstraintResolution)
CHECK("(unknown, number) -> ()" == toString(requireType("prime_iter")));
else
CHECK("<a>(a, number) -> ()" == toString(requireType("prime_iter")));
} }
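The expectation change above is the recurring pattern in this commit: a parameter that nothing constrains used to be quantified as a generic (hence `<a>(a, number) -> ()`), whereas the new solver generalizes the free type to `unknown`. A minimal Luau shape with that property (editorial sketch; the real test body is not shown in this diff):

-- Nothing in the body constrains `state`, so the old solver quantifies it as
-- a generic while the new solver reports it as unknown.
local function prime_iter(state, n: number)
    return
end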
TEST_CASE_FIXTURE(Fixture, "free_options_cannot_be_unified_together") TEST_CASE_FIXTURE(Fixture, "free_options_cannot_be_unified_together")

View file

@ -514,7 +514,11 @@ TEST_CASE_FIXTURE(Fixture, "free_type_is_equal_to_an_lvalue")
LUAU_REQUIRE_NO_ERRORS(result); LUAU_REQUIRE_NO_ERRORS(result);
CHECK_EQ(toString(requireTypeAtPosition({3, 33})), "a"); // a == b if (FFlag::DebugLuauDeferredConstraintResolution)
CHECK_EQ(toString(requireTypeAtPosition({3, 33})), "unknown"); // a == b
else
CHECK_EQ(toString(requireTypeAtPosition({3, 33})), "a"); // a == b
CHECK_EQ(toString(requireTypeAtPosition({3, 36})), "string?"); // a == b CHECK_EQ(toString(requireTypeAtPosition({3, 36})), "string?"); // a == b
} }

View file

@ -367,7 +367,7 @@ TEST_CASE_FIXTURE(Fixture, "open_table_unification_3")
CHECK(arg0Table->props.count("baz")); CHECK(arg0Table->props.count("baz"));
} }
TEST_CASE_FIXTURE(Fixture, "table_param_row_polymorphism_1") TEST_CASE_FIXTURE(Fixture, "table_param_width_subtyping_1")
{ {
CheckResult result = check(R"( CheckResult result = check(R"(
function foo(o) function foo(o)
@ -382,7 +382,7 @@ TEST_CASE_FIXTURE(Fixture, "table_param_row_polymorphism_1")
LUAU_REQUIRE_NO_ERRORS(result); LUAU_REQUIRE_NO_ERRORS(result);
} }
TEST_CASE_FIXTURE(Fixture, "table_param_row_polymorphism_2") TEST_CASE_FIXTURE(Fixture, "table_param_width_subtyping_2")
{ {
CheckResult result = check(R"( CheckResult result = check(R"(
--!strict --!strict
@ -403,7 +403,7 @@ TEST_CASE_FIXTURE(Fixture, "table_param_row_polymorphism_2")
CHECK_EQ("baz", error->properties[0]); CHECK_EQ("baz", error->properties[0]);
} }
TEST_CASE_FIXTURE(Fixture, "table_param_row_polymorphism_3") TEST_CASE_FIXTURE(Fixture, "table_param_width_subtyping_3")
{ {
CheckResult result = check(R"( CheckResult result = check(R"(
local T = {} local T = {}
@ -433,7 +433,7 @@ TEST_CASE_FIXTURE(Fixture, "table_param_row_polymorphism_3")
} }
#if 0 #if 0
TEST_CASE_FIXTURE(Fixture, "table_param_row_polymorphism_2") TEST_CASE_FIXTURE(Fixture, "table_param_width_subtyping_2")
{ {
CheckResult result = check(R"( CheckResult result = check(R"(
function id(x) function id(x)

View file

@ -94,6 +94,9 @@ TEST_CASE_FIXTURE(Fixture, "infer_locals_via_assignment_from_its_call_site")
} }
TEST_CASE_FIXTURE(Fixture, "interesting_local_type_inference_case") TEST_CASE_FIXTURE(Fixture, "interesting_local_type_inference_case")
{ {
if (!FFlag::DebugLuauDeferredConstraintResolution)
return;
ScopedFastFlag sff[] = { ScopedFastFlag sff[] = {
{"DebugLuauDeferredConstraintResolution", true}, {"DebugLuauDeferredConstraintResolution", true},
}; };
@ -1086,7 +1089,7 @@ TEST_CASE_FIXTURE(Fixture, "type_infer_recursion_limit_normalizer")
CHECK(1 == result.errors.size()); CHECK(1 == result.errors.size());
CHECK(Location{{3, 12}, {3, 46}} == result.errors[0].location); CHECK(Location{{3, 12}, {3, 46}} == result.errors[0].location);
CHECK_EQ("Internal error: Code is too complex to typecheck! Consider adding type annotations around this area", toString(result.errors[0])); CHECK_EQ("Code is too complex to typecheck! Consider simplifying the code around this area", toString(result.errors[0]));
} }
TEST_CASE_FIXTURE(Fixture, "type_infer_cache_limit_normalizer") TEST_CASE_FIXTURE(Fixture, "type_infer_cache_limit_normalizer")
@ -1099,7 +1102,7 @@ TEST_CASE_FIXTURE(Fixture, "type_infer_cache_limit_normalizer")
)"); )");
LUAU_REQUIRE_ERRORS(result); LUAU_REQUIRE_ERRORS(result);
CHECK_EQ("Internal error: Code is too complex to typecheck! Consider adding type annotations around this area", toString(result.errors[0])); CHECK_EQ("Code is too complex to typecheck! Consider simplifying the code around this area", toString(result.errors[0]));
} }
TEST_CASE_FIXTURE(Fixture, "follow_on_new_types_in_substitution") TEST_CASE_FIXTURE(Fixture, "follow_on_new_types_in_substitution")

View file

@ -311,7 +311,10 @@ TEST_CASE_FIXTURE(Fixture, "dont_unify_operands_if_one_of_the_operand_is_never_i
LUAU_REQUIRE_NO_ERRORS(result); LUAU_REQUIRE_NO_ERRORS(result);
CHECK_EQ("<a>(nil, a) -> boolean", toString(requireType("ord"))); if (FFlag::DebugLuauDeferredConstraintResolution)
CHECK_EQ("(nil, unknown) -> boolean", toString(requireType("ord")));
else
CHECK_EQ("<a>(nil, a) -> boolean", toString(requireType("ord")));
} }
TEST_CASE_FIXTURE(Fixture, "math_operators_and_never") TEST_CASE_FIXTURE(Fixture, "math_operators_and_never")

View file

@ -18,7 +18,7 @@ struct Unifier2Fixture
BuiltinTypes builtinTypes; BuiltinTypes builtinTypes;
Scope scope{builtinTypes.anyTypePack}; Scope scope{builtinTypes.anyTypePack};
InternalErrorReporter iceReporter; InternalErrorReporter iceReporter;
Unifier2 u2{NotNull{&arena}, NotNull{&builtinTypes}, NotNull{&iceReporter}}; Unifier2 u2{NotNull{&arena}, NotNull{&builtinTypes}, NotNull{&scope}, NotNull{&iceReporter}};
ToStringOptions opts; ToStringOptions opts;
ScopedFastFlag sff{"DebugLuauDeferredConstraintResolution", true}; ScopedFastFlag sff{"DebugLuauDeferredConstraintResolution", true};
@ -111,12 +111,12 @@ TEST_CASE_FIXTURE(Unifier2Fixture, "generalize_a_type_that_is_bounded_by_another
ft2->upperBound = t1; ft2->upperBound = t1;
ft2->lowerBound = builtinTypes.unknownType; ft2->lowerBound = builtinTypes.unknownType;
auto t2generalized = u2.generalize(NotNull{&scope}, t2); auto t2generalized = u2.generalize(t2);
REQUIRE(t2generalized); REQUIRE(t2generalized);
CHECK(follow(t1) == follow(t2)); CHECK(follow(t1) == follow(t2));
auto t1generalized = u2.generalize(NotNull{&scope}, t1); auto t1generalized = u2.generalize(t1);
REQUIRE(t1generalized); REQUIRE(t1generalized);
CHECK(builtinTypes.unknownType == follow(t1)); CHECK(builtinTypes.unknownType == follow(t1));
@ -137,12 +137,12 @@ TEST_CASE_FIXTURE(Unifier2Fixture, "generalize_a_type_that_is_bounded_by_another
ft2->upperBound = t1; ft2->upperBound = t1;
ft2->lowerBound = builtinTypes.unknownType; ft2->lowerBound = builtinTypes.unknownType;
auto t1generalized = u2.generalize(NotNull{&scope}, t1); auto t1generalized = u2.generalize(t1);
REQUIRE(t1generalized); REQUIRE(t1generalized);
CHECK(follow(t1) == follow(t2)); CHECK(follow(t1) == follow(t2));
auto t2generalized = u2.generalize(NotNull{&scope}, t2); auto t2generalized = u2.generalize(t2);
REQUIRE(t2generalized); REQUIRE(t2generalized);
CHECK(builtinTypes.unknownType == follow(t1)); CHECK(builtinTypes.unknownType == follow(t1));

View file

@ -8,9 +8,9 @@
using namespace Luau; using namespace Luau;
LUAU_FASTINT(LuauVisitRecursionLimit) LUAU_FASTINT(LuauVisitRecursionLimit);
TEST_SUITE_BEGIN("VisitTypeVar"); TEST_SUITE_BEGIN("VisitType");
TEST_CASE_FIXTURE(Fixture, "throw_when_limit_is_exceeded") TEST_CASE_FIXTURE(Fixture, "throw_when_limit_is_exceeded")
{ {
@ -38,4 +38,21 @@ TEST_CASE_FIXTURE(Fixture, "dont_throw_when_limit_is_high_enough")
(void)toString(tType); (void)toString(tType);
} }
TEST_CASE_FIXTURE(Fixture, "some_free_types_do_not_have_bounds")
{
Type t{FreeType{TypeLevel{}}};
(void)toString(&t);
}
TEST_CASE_FIXTURE(Fixture, "some_free_types_have_bounds")
{
ScopedFastFlag sff{"DebugLuauDeferredConstraintResolution", true};
Scope scope{builtinTypes->anyTypePack};
Type t{FreeType{&scope, builtinTypes->neverType, builtinTypes->numberType}};
CHECK("('a <: number)" == toString(&t));
}
TEST_SUITE_END(); TEST_SUITE_END();

View file

@ -1,14 +1,14 @@
AnnotationTests.infer_type_of_value_a_via_typeof_with_assignment AnnotationTests.infer_type_of_value_a_via_typeof_with_assignment
AnnotationTests.two_type_params
AstQuery.last_argument_function_call_type AstQuery.last_argument_function_call_type
AstQuery::getDocumentationSymbolAtPosition.table_overloaded_function_prop
AutocompleteTest.anonymous_autofilled_generic_on_argument_type_pack_vararg AutocompleteTest.anonymous_autofilled_generic_on_argument_type_pack_vararg
AutocompleteTest.anonymous_autofilled_generic_type_pack_vararg AutocompleteTest.anonymous_autofilled_generic_type_pack_vararg
AutocompleteTest.autocomplete_interpolated_string_as_singleton AutocompleteTest.autocomplete_interpolated_string_as_singleton
AutocompleteTest.autocomplete_oop_implicit_self AutocompleteTest.autocomplete_oop_implicit_self
AutocompleteTest.autocomplete_response_perf1
AutocompleteTest.autocomplete_string_singleton_escape AutocompleteTest.autocomplete_string_singleton_escape
AutocompleteTest.autocomplete_string_singletons AutocompleteTest.autocomplete_string_singletons
AutocompleteTest.cyclic_table AutocompleteTest.cyclic_table
AutocompleteTest.do_wrong_compatible_nonself_calls
AutocompleteTest.suggest_external_module_type AutocompleteTest.suggest_external_module_type
AutocompleteTest.type_correct_expected_argument_type_pack_suggestion AutocompleteTest.type_correct_expected_argument_type_pack_suggestion
AutocompleteTest.type_correct_expected_argument_type_suggestion AutocompleteTest.type_correct_expected_argument_type_suggestion
@ -21,62 +21,76 @@ AutocompleteTest.type_correct_suggestion_in_argument
AutocompleteTest.unsealed_table_2 AutocompleteTest.unsealed_table_2
BuiltinTests.aliased_string_format BuiltinTests.aliased_string_format
BuiltinTests.assert_removes_falsy_types BuiltinTests.assert_removes_falsy_types
BuiltinTests.assert_removes_falsy_types2
BuiltinTests.assert_removes_falsy_types_even_from_type_pack_tail_but_only_for_the_first_type BuiltinTests.assert_removes_falsy_types_even_from_type_pack_tail_but_only_for_the_first_type
BuiltinTests.assert_returns_false_and_string_iff_it_knows_the_first_argument_cannot_be_truthy BuiltinTests.assert_returns_false_and_string_iff_it_knows_the_first_argument_cannot_be_truthy
BuiltinTests.bad_select_should_not_crash BuiltinTests.bad_select_should_not_crash
BuiltinTests.coroutine_resume_anything_goes
BuiltinTests.debug_info_is_crazy
BuiltinTests.global_singleton_types_are_sealed
BuiltinTests.gmatch_capture_types
BuiltinTests.gmatch_capture_types2
BuiltinTests.gmatch_capture_types_balanced_escaped_parens
BuiltinTests.gmatch_capture_types_default_capture
BuiltinTests.gmatch_capture_types_parens_in_sets_are_ignored
BuiltinTests.gmatch_capture_types_set_containing_lbracket
BuiltinTests.gmatch_definition
BuiltinTests.ipairs_iterator_should_infer_types_and_type_check
BuiltinTests.next_iterator_should_infer_types_and_type_check BuiltinTests.next_iterator_should_infer_types_and_type_check
BuiltinTests.pairs_iterator_should_infer_types_and_type_check
BuiltinTests.see_thru_select
BuiltinTests.see_thru_select_count
BuiltinTests.select_slightly_out_of_range BuiltinTests.select_slightly_out_of_range
BuiltinTests.select_way_out_of_range BuiltinTests.select_way_out_of_range
BuiltinTests.select_with_decimal_argument_is_rounded_down
BuiltinTests.set_metatable_needs_arguments BuiltinTests.set_metatable_needs_arguments
BuiltinTests.setmetatable_should_not_mutate_persisted_types BuiltinTests.setmetatable_should_not_mutate_persisted_types
BuiltinTests.sort_with_bad_predicate BuiltinTests.sort_with_bad_predicate
BuiltinTests.string_format_arg_types_inference BuiltinTests.sort_with_predicate
BuiltinTests.string_format_arg_count_mismatch
BuiltinTests.string_format_as_method BuiltinTests.string_format_as_method
BuiltinTests.string_format_correctly_ordered_types BuiltinTests.string_format_correctly_ordered_types
BuiltinTests.string_format_report_all_type_errors_at_correct_positions BuiltinTests.string_format_report_all_type_errors_at_correct_positions
BuiltinTests.string_format_tostring_specifier_type_constraint
BuiltinTests.string_format_use_correct_argument2 BuiltinTests.string_format_use_correct_argument2
BuiltinTests.table_dot_remove_optionally_returns_generic BuiltinTests.table_dot_remove_optionally_returns_generic
BuiltinTests.table_freeze_is_generic BuiltinTests.table_freeze_is_generic
BuiltinTests.table_insert_correctly_infers_type_of_array_2_args_overload BuiltinTests.table_insert_correctly_infers_type_of_array_2_args_overload
BuiltinTests.table_insert_correctly_infers_type_of_array_3_args_overload BuiltinTests.table_insert_correctly_infers_type_of_array_3_args_overload
BuiltinTests.table_pack BuiltinTests.trivial_select
BuiltinTests.table_pack_reduce BuiltinTests.xpcall
BuiltinTests.table_pack_variadic
DefinitionTests.class_definition_indexer DefinitionTests.class_definition_indexer
DefinitionTests.class_definition_overload_metamethods DefinitionTests.class_definition_overload_metamethods
DefinitionTests.class_definition_string_props DefinitionTests.class_definition_string_props
DefinitionTests.definition_file_classes
Differ.equal_generictp_cyclic Differ.equal_generictp_cyclic
Differ.equal_table_A_B_C
Differ.equal_table_cyclic_diamonds_unraveled Differ.equal_table_cyclic_diamonds_unraveled
Differ.equal_table_kind_A
Differ.equal_table_kind_B
Differ.equal_table_kind_C
Differ.equal_table_kind_D
Differ.equal_table_measuring_tapes
Differ.equal_table_two_shifted_circles_are_not_different Differ.equal_table_two_shifted_circles_are_not_different
Differ.function_table_self_referential_cyclic
Differ.generictp_normal Differ.generictp_normal
Differ.generictp_normal_2 Differ.generictp_normal_2
Differ.left_cyclic_table_right_table_missing_property
Differ.left_cyclic_table_right_table_property_wrong
Differ.right_cyclic_table_left_table_property_wrong
Differ.table_left_circle_right_measuring_tape Differ.table_left_circle_right_measuring_tape
GenericsTests.better_mismatch_error_messages GenericsTests.better_mismatch_error_messages
GenericsTests.bidirectional_checking_and_generalization_play_nice GenericsTests.check_generic_typepack_function
GenericsTests.bound_tables_do_not_clone_original_fields
GenericsTests.check_mutual_generic_functions GenericsTests.check_mutual_generic_functions
GenericsTests.correctly_instantiate_polymorphic_member_functions GenericsTests.correctly_instantiate_polymorphic_member_functions
GenericsTests.do_not_infer_generic_functions GenericsTests.do_not_infer_generic_functions
GenericsTests.dont_substitute_bound_types GenericsTests.dont_substitute_bound_types
GenericsTests.generic_argument_count_too_few GenericsTests.generic_argument_count_too_few
GenericsTests.generic_argument_count_too_many GenericsTests.generic_argument_count_too_many
GenericsTests.generic_functions_dont_cache_type_parameters
GenericsTests.generic_functions_should_be_memory_safe GenericsTests.generic_functions_should_be_memory_safe
GenericsTests.generic_type_pack_parentheses GenericsTests.generic_type_pack_parentheses
GenericsTests.generic_type_pack_unification2
GenericsTests.higher_rank_polymorphism_should_not_accept_instantiated_arguments GenericsTests.higher_rank_polymorphism_should_not_accept_instantiated_arguments
GenericsTests.hof_subtype_instantiation_regression
GenericsTests.infer_generic_function_function_argument
GenericsTests.infer_generic_function_function_argument_2 GenericsTests.infer_generic_function_function_argument_2
GenericsTests.infer_generic_function_function_argument_3 GenericsTests.infer_generic_function_function_argument_3
GenericsTests.infer_generic_function_function_argument_overloaded GenericsTests.infer_generic_function_function_argument_overloaded
GenericsTests.infer_generic_lib_function_function_argument GenericsTests.infer_generic_lib_function_function_argument
GenericsTests.infer_generic_property GenericsTests.infer_generic_property
GenericsTests.instantiate_cyclic_generic_function
GenericsTests.instantiate_generic_function_in_assignments GenericsTests.instantiate_generic_function_in_assignments
GenericsTests.instantiate_generic_function_in_assignments2 GenericsTests.instantiate_generic_function_in_assignments2
GenericsTests.instantiated_function_argument_names GenericsTests.instantiated_function_argument_names
@ -87,21 +101,31 @@ GenericsTests.quantify_functions_even_if_they_have_an_explicit_generic
GenericsTests.self_recursive_instantiated_param GenericsTests.self_recursive_instantiated_param
IntersectionTypes.intersection_of_tables_with_top_properties IntersectionTypes.intersection_of_tables_with_top_properties
IntersectionTypes.less_greedy_unification_with_intersection_types IntersectionTypes.less_greedy_unification_with_intersection_types
IntersectionTypes.select_correct_union_fn
IntersectionTypes.should_still_pick_an_overload_whose_arguments_are_unions
IntersectionTypes.table_intersection_write_sealed_indirect IntersectionTypes.table_intersection_write_sealed_indirect
IntersectionTypes.table_write_sealed_indirect IntersectionTypes.table_write_sealed_indirect
Normalize.higher_order_function_with_annotation
Normalize.negations_of_tables Normalize.negations_of_tables
Normalize.specific_functions_cannot_be_negated Normalize.specific_functions_cannot_be_negated
ProvisionalTests.assign_table_with_refined_property_with_a_similar_type_is_illegal ProvisionalTests.assign_table_with_refined_property_with_a_similar_type_is_illegal
ProvisionalTests.bail_early_if_unification_is_too_complicated
ProvisionalTests.choose_the_right_overload_for_pcall
ProvisionalTests.do_not_ice_when_trying_to_pick_first_of_generic_type_pack ProvisionalTests.do_not_ice_when_trying_to_pick_first_of_generic_type_pack
ProvisionalTests.error_on_eq_metamethod_returning_a_type_other_than_boolean ProvisionalTests.error_on_eq_metamethod_returning_a_type_other_than_boolean
ProvisionalTests.free_is_not_bound_to_any
ProvisionalTests.free_options_can_be_unified_together ProvisionalTests.free_options_can_be_unified_together
ProvisionalTests.free_options_cannot_be_unified_together ProvisionalTests.free_options_cannot_be_unified_together
ProvisionalTests.function_returns_many_things_but_first_of_it_is_forgotten
ProvisionalTests.generic_type_leak_to_module_interface
ProvisionalTests.greedy_inference_with_shared_self_triggers_function_with_no_returns ProvisionalTests.greedy_inference_with_shared_self_triggers_function_with_no_returns
ProvisionalTests.pcall_returns_at_least_two_value_but_function_returns_nothing
ProvisionalTests.setmetatable_constrains_free_type_into_free_table ProvisionalTests.setmetatable_constrains_free_type_into_free_table
ProvisionalTests.specialization_binds_with_prototypes_too_early ProvisionalTests.specialization_binds_with_prototypes_too_early
ProvisionalTests.table_insert_with_a_singleton_argument ProvisionalTests.table_insert_with_a_singleton_argument
ProvisionalTests.table_unification_infinite_recursion ProvisionalTests.table_unification_infinite_recursion
ProvisionalTests.typeguard_inference_incomplete ProvisionalTests.typeguard_inference_incomplete
ProvisionalTests.xpcall_returns_what_f_returns
RefinementTest.call_an_incompatible_function_after_using_typeguard RefinementTest.call_an_incompatible_function_after_using_typeguard
RefinementTest.dataflow_analysis_can_tell_refinements_when_its_appropriate_to_refine_into_nil_or_never RefinementTest.dataflow_analysis_can_tell_refinements_when_its_appropriate_to_refine_into_nil_or_never
RefinementTest.discriminate_from_truthiness_of_x RefinementTest.discriminate_from_truthiness_of_x
@ -121,16 +145,13 @@ RefinementTest.x_as_any_if_x_is_instance_elseif_x_is_table
TableTests.a_free_shape_can_turn_into_a_scalar_if_it_is_compatible TableTests.a_free_shape_can_turn_into_a_scalar_if_it_is_compatible
TableTests.a_free_shape_cannot_turn_into_a_scalar_if_it_is_not_compatible TableTests.a_free_shape_cannot_turn_into_a_scalar_if_it_is_not_compatible
TableTests.call_method TableTests.call_method
TableTests.cannot_augment_sealed_table
TableTests.cannot_change_type_of_unsealed_table_prop TableTests.cannot_change_type_of_unsealed_table_prop
TableTests.casting_sealed_tables_with_props_into_table_with_indexer TableTests.casting_tables_with_props_into_table_with_indexer3
TableTests.casting_tables_with_props_into_table_with_indexer4 TableTests.casting_tables_with_props_into_table_with_indexer4
TableTests.casting_unsealed_tables_with_props_into_table_with_indexer TableTests.casting_unsealed_tables_with_props_into_table_with_indexer
TableTests.checked_prop_too_early TableTests.checked_prop_too_early
TableTests.cli_84607_missing_prop_in_array_or_dict TableTests.cli_84607_missing_prop_in_array_or_dict
TableTests.cyclic_shifted_tables TableTests.cyclic_shifted_tables
TableTests.defining_a_method_for_a_local_sealed_table_must_fail
TableTests.defining_a_self_method_for_a_local_sealed_table_must_fail
TableTests.disallow_indexing_into_an_unsealed_table_with_no_indexer_in_strict_mode TableTests.disallow_indexing_into_an_unsealed_table_with_no_indexer_in_strict_mode
TableTests.dont_crash_when_setmetatable_does_not_produce_a_metatabletypevar TableTests.dont_crash_when_setmetatable_does_not_produce_a_metatabletypevar
TableTests.dont_extend_unsealed_tables_in_rvalue_position TableTests.dont_extend_unsealed_tables_in_rvalue_position
@ -141,51 +162,49 @@ TableTests.dont_suggest_exact_match_keys
TableTests.error_detailed_metatable_prop TableTests.error_detailed_metatable_prop
TableTests.explicitly_typed_table TableTests.explicitly_typed_table
TableTests.explicitly_typed_table_with_indexer TableTests.explicitly_typed_table_with_indexer
TableTests.fuzz_table_unify_instantiated_table_with_prop_realloc
TableTests.generalize_table_argument TableTests.generalize_table_argument
TableTests.generic_table_instantiation_potential_regression TableTests.generic_table_instantiation_potential_regression
TableTests.give_up_after_one_metatable_index_look_up TableTests.give_up_after_one_metatable_index_look_up
TableTests.hide_table_error_properties
TableTests.indexers_get_quantified_too TableTests.indexers_get_quantified_too
TableTests.indexing_from_a_table_should_prefer_properties_when_possible
TableTests.inequality_operators_imply_exactly_matching_types TableTests.inequality_operators_imply_exactly_matching_types
TableTests.infer_array_2 TableTests.infer_array_2
TableTests.infer_indexer_from_value_property_in_literal
TableTests.infer_type_when_indexing_from_a_table_indexer
TableTests.inferred_return_type_of_free_table TableTests.inferred_return_type_of_free_table
TableTests.instantiate_table_cloning_3 TableTests.instantiate_table_cloning_3
TableTests.leaking_bad_metatable_errors TableTests.leaking_bad_metatable_errors
TableTests.less_exponential_blowup_please TableTests.less_exponential_blowup_please
TableTests.missing_metatable_for_sealed_tables_do_not_get_inferred TableTests.missing_metatable_for_sealed_tables_do_not_get_inferred
TableTests.mixed_tables_with_implicit_numbered_keys TableTests.mixed_tables_with_implicit_numbered_keys
TableTests.ok_to_add_property_to_free_table
TableTests.ok_to_provide_a_subtype_during_construction TableTests.ok_to_provide_a_subtype_during_construction
TableTests.okay_to_add_property_to_unsealed_tables_by_assignment TableTests.okay_to_add_property_to_unsealed_tables_by_assignment
TableTests.okay_to_add_property_to_unsealed_tables_by_function_call
TableTests.oop_indexer_works TableTests.oop_indexer_works
TableTests.oop_polymorphic TableTests.oop_polymorphic
TableTests.pass_a_union_of_tables_to_a_function_that_requires_a_table TableTests.pass_a_union_of_tables_to_a_function_that_requires_a_table
TableTests.pass_a_union_of_tables_to_a_function_that_requires_a_table_2 TableTests.pass_a_union_of_tables_to_a_function_that_requires_a_table_2
TableTests.pass_incompatible_union_to_a_generic_table_without_crashing
TableTests.passing_compatible_unions_to_a_generic_table_without_crashing
TableTests.quantify_even_that_table_was_never_exported_at_all TableTests.quantify_even_that_table_was_never_exported_at_all
TableTests.quantify_metatables_of_metatables_of_table TableTests.quantify_metatables_of_metatables_of_table
TableTests.quantifying_a_bound_var_works
TableTests.reasonable_error_when_adding_a_nonexistent_property_to_an_array_like_table TableTests.reasonable_error_when_adding_a_nonexistent_property_to_an_array_like_table
TableTests.recursive_metatable_type_call TableTests.recursive_metatable_type_call
TableTests.right_table_missing_key2 TableTests.right_table_missing_key2
TableTests.scalar_is_a_subtype_of_a_compatible_polymorphic_shape_type
TableTests.scalar_is_not_a_subtype_of_a_compatible_polymorphic_shape_type
TableTests.sealed_table_indexers_must_unify
TableTests.shared_selfs TableTests.shared_selfs
TableTests.shared_selfs_from_free_param TableTests.shared_selfs_from_free_param
TableTests.shared_selfs_through_metatables TableTests.shared_selfs_through_metatables
TableTests.table_call_metamethod_basic TableTests.table_call_metamethod_basic
TableTests.table_param_width_subtyping_1
TableTests.table_param_width_subtyping_2
TableTests.table_param_width_subtyping_3
TableTests.table_simple_call TableTests.table_simple_call
TableTests.table_subtyping_with_extra_props_dont_report_multiple_errors TableTests.table_subtyping_with_extra_props_dont_report_multiple_errors
TableTests.table_subtyping_with_missing_props_dont_report_multiple_errors TableTests.table_subtyping_with_missing_props_dont_report_multiple_errors
TableTests.table_subtyping_with_missing_props_dont_report_multiple_errors2 TableTests.table_subtyping_with_missing_props_dont_report_multiple_errors2
TableTests.table_unification_4 TableTests.table_unifies_into_map
TableTests.type_mismatch_on_massive_table_is_cut_short TableTests.type_mismatch_on_massive_table_is_cut_short
TableTests.unifying_tables_shouldnt_uaf1
TableTests.used_colon_instead_of_dot TableTests.used_colon_instead_of_dot
TableTests.used_dot_instead_of_colon TableTests.used_dot_instead_of_colon
TableTests.when_augmenting_an_unsealed_table_with_an_indexer_apply_the_correct_scope_to_the_indexer_type
TableTests.wrong_assign_does_hit_indexer TableTests.wrong_assign_does_hit_indexer
ToDot.function ToDot.function
ToString.exhaustive_toString_of_cyclic_table ToString.exhaustive_toString_of_cyclic_table
@ -196,9 +215,11 @@ ToString.toStringDetailed2
ToString.toStringErrorPack ToString.toStringErrorPack
ToString.toStringGenericPack ToString.toStringGenericPack
ToString.toStringNamedFunction_generic_pack ToString.toStringNamedFunction_generic_pack
ToString.toStringNamedFunction_map
TryUnifyTests.members_of_failed_typepack_unification_are_unified_with_errorType TryUnifyTests.members_of_failed_typepack_unification_are_unified_with_errorType
TryUnifyTests.result_of_failed_typepack_unification_is_constrained TryUnifyTests.result_of_failed_typepack_unification_is_constrained
TryUnifyTests.typepack_unification_should_trim_free_tails TryUnifyTests.typepack_unification_should_trim_free_tails
TryUnifyTests.variadics_should_use_reversed_properly
TypeAliases.dont_lose_track_of_PendingExpansionTypes_after_substitution TypeAliases.dont_lose_track_of_PendingExpansionTypes_after_substitution
TypeAliases.generic_param_remap TypeAliases.generic_param_remap
TypeAliases.mismatched_generic_type_param TypeAliases.mismatched_generic_type_param
@ -211,21 +232,28 @@ TypeAliases.type_alias_local_mutation
TypeAliases.type_alias_local_rename TypeAliases.type_alias_local_rename
TypeAliases.type_alias_locations TypeAliases.type_alias_locations
TypeAliases.type_alias_of_an_imported_recursive_generic_type TypeAliases.type_alias_of_an_imported_recursive_generic_type
TypeFamilyTests.add_family_at_work
TypeFamilyTests.family_as_fn_arg TypeFamilyTests.family_as_fn_arg
TypeFamilyTests.family_as_fn_ret
TypeFamilyTests.function_internal_families
TypeFamilyTests.internal_families_raise_errors
TypeFamilyTests.table_internal_families TypeFamilyTests.table_internal_families
TypeFamilyTests.unsolvable_family TypeFamilyTests.unsolvable_family
TypeInfer.be_sure_to_use_active_txnlog_when_evaluating_a_variadic_overload TypeInfer.be_sure_to_use_active_txnlog_when_evaluating_a_variadic_overload
TypeInfer.bidirectional_checking_of_higher_order_function TypeInfer.bidirectional_checking_of_higher_order_function
TypeInfer.check_expr_recursion_limit
TypeInfer.check_type_infer_recursion_count TypeInfer.check_type_infer_recursion_count
TypeInfer.cli_39932_use_unifier_in_ensure_methods TypeInfer.cli_39932_use_unifier_in_ensure_methods
TypeInfer.cli_50041_committing_txnlog_in_apollo_client_error TypeInfer.cli_50041_committing_txnlog_in_apollo_client_error
TypeInfer.dont_report_type_errors_within_an_AstExprError TypeInfer.dont_report_type_errors_within_an_AstExprError
TypeInfer.dont_report_type_errors_within_an_AstStatError TypeInfer.dont_report_type_errors_within_an_AstStatError
TypeInfer.follow_on_new_types_in_substitution
TypeInfer.fuzz_free_table_type_change_during_index_check TypeInfer.fuzz_free_table_type_change_during_index_check
TypeInfer.infer_assignment_value_types_mutable_lval TypeInfer.infer_assignment_value_types_mutable_lval
TypeInfer.infer_locals_via_assignment_from_its_call_site TypeInfer.infer_locals_via_assignment_from_its_call_site
TypeInfer.interesting_local_type_inference_case
TypeInfer.no_stack_overflow_from_isoptional TypeInfer.no_stack_overflow_from_isoptional
TypeInfer.promote_tail_type_packs
TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter
TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter_2 TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter_2
TypeInfer.tc_after_error_recovery_no_replacement_name_in_error TypeInfer.tc_after_error_recovery_no_replacement_name_in_error
TypeInfer.type_infer_cache_limit_normalizer TypeInfer.type_infer_cache_limit_normalizer
@ -241,11 +269,16 @@ TypeInferAnyError.for_in_loop_iterator_returns_any2
TypeInferAnyError.intersection_of_any_can_have_props TypeInferAnyError.intersection_of_any_can_have_props
TypeInferAnyError.replace_every_free_type_when_unifying_a_complex_function_with_any TypeInferAnyError.replace_every_free_type_when_unifying_a_complex_function_with_any
TypeInferAnyError.union_of_types_regression_test TypeInferAnyError.union_of_types_regression_test
TypeInferClasses.callable_classes
TypeInferClasses.class_type_mismatch_with_name_conflict TypeInferClasses.class_type_mismatch_with_name_conflict
TypeInferClasses.detailed_class_unification_error
TypeInferClasses.index_instance_property TypeInferClasses.index_instance_property
TypeInferClasses.table_class_unification_reports_sane_errors_for_missing_properties TypeInferClasses.table_class_unification_reports_sane_errors_for_missing_properties
TypeInferClasses.table_indexers_are_invariant
TypeInferFunctions.apply_of_lambda_with_inferred_and_explicit_types
TypeInferFunctions.cannot_hoist_interior_defns_into_signature TypeInferFunctions.cannot_hoist_interior_defns_into_signature
TypeInferFunctions.dont_assert_when_the_tarjan_limit_is_exceeded_during_generalization TypeInferFunctions.dont_assert_when_the_tarjan_limit_is_exceeded_during_generalization
TypeInferFunctions.dont_give_other_overloads_message_if_only_one_argument_matching_overload_exists
TypeInferFunctions.dont_infer_parameter_types_for_functions_from_their_call_site TypeInferFunctions.dont_infer_parameter_types_for_functions_from_their_call_site
TypeInferFunctions.function_cast_error_uses_correct_language TypeInferFunctions.function_cast_error_uses_correct_language
TypeInferFunctions.function_decl_non_self_sealed_overwrite_2 TypeInferFunctions.function_decl_non_self_sealed_overwrite_2
@ -253,17 +286,24 @@ TypeInferFunctions.function_decl_non_self_unsealed_overwrite
TypeInferFunctions.function_does_not_return_enough_values TypeInferFunctions.function_does_not_return_enough_values
TypeInferFunctions.function_exprs_are_generalized_at_signature_scope_not_enclosing TypeInferFunctions.function_exprs_are_generalized_at_signature_scope_not_enclosing
TypeInferFunctions.function_statement_sealed_table_assignment_through_indexer TypeInferFunctions.function_statement_sealed_table_assignment_through_indexer
TypeInferFunctions.generic_packs_are_not_variadic
TypeInferFunctions.higher_order_function_2 TypeInferFunctions.higher_order_function_2
TypeInferFunctions.higher_order_function_3
TypeInferFunctions.higher_order_function_4 TypeInferFunctions.higher_order_function_4
TypeInferFunctions.improved_function_arg_mismatch_errors TypeInferFunctions.improved_function_arg_mismatch_errors
TypeInferFunctions.infer_anonymous_function_arguments TypeInferFunctions.infer_anonymous_function_arguments
TypeInferFunctions.infer_anonymous_function_arguments_outside_call
TypeInferFunctions.infer_generic_function_function_argument TypeInferFunctions.infer_generic_function_function_argument
TypeInferFunctions.infer_generic_function_function_argument_overloaded TypeInferFunctions.infer_generic_function_function_argument_overloaded
TypeInferFunctions.infer_generic_lib_function_function_argument TypeInferFunctions.infer_generic_lib_function_function_argument
TypeInferFunctions.infer_higher_order_function
TypeInferFunctions.infer_return_type_from_selected_overload
TypeInferFunctions.infer_that_function_does_not_return_a_table TypeInferFunctions.infer_that_function_does_not_return_a_table
TypeInferFunctions.it_is_ok_to_oversaturate_a_higher_order_function_argument
TypeInferFunctions.luau_subtyping_is_np_hard TypeInferFunctions.luau_subtyping_is_np_hard
TypeInferFunctions.no_lossy_function_type TypeInferFunctions.no_lossy_function_type
TypeInferFunctions.param_1_and_2_both_takes_the_same_generic_but_their_arguments_are_incompatible
TypeInferFunctions.param_1_and_2_both_takes_the_same_generic_but_their_arguments_are_incompatible_2
TypeInferFunctions.record_matching_overload
TypeInferFunctions.report_exiting_without_return_strict TypeInferFunctions.report_exiting_without_return_strict
TypeInferFunctions.return_type_by_overload TypeInferFunctions.return_type_by_overload
TypeInferFunctions.too_few_arguments_variadic TypeInferFunctions.too_few_arguments_variadic
@ -274,34 +314,51 @@ TypeInferFunctions.too_many_return_values_in_parentheses
TypeInferFunctions.too_many_return_values_no_function TypeInferFunctions.too_many_return_values_no_function
TypeInferLoops.cli_68448_iterators_need_not_accept_nil TypeInferLoops.cli_68448_iterators_need_not_accept_nil
TypeInferLoops.dcr_iteration_on_never_gives_never TypeInferLoops.dcr_iteration_on_never_gives_never
TypeInferLoops.dcr_xpath_candidates
TypeInferLoops.for_in_loop TypeInferLoops.for_in_loop
TypeInferLoops.for_in_loop_error_on_factory_not_returning_the_right_amount_of_values TypeInferLoops.for_in_loop_error_on_factory_not_returning_the_right_amount_of_values
TypeInferLoops.for_in_loop_error_on_iterator_requiring_args_but_none_given
TypeInferLoops.for_in_loop_on_error TypeInferLoops.for_in_loop_on_error
TypeInferLoops.for_in_loop_on_non_function
TypeInferLoops.for_in_loop_with_custom_iterator TypeInferLoops.for_in_loop_with_custom_iterator
TypeInferLoops.for_in_loop_with_incompatible_args_to_iterator TypeInferLoops.for_in_loop_with_incompatible_args_to_iterator
TypeInferLoops.for_in_loop_with_next TypeInferLoops.for_in_loop_with_next
TypeInferLoops.for_in_with_just_one_iterator_is_ok
TypeInferLoops.ipairs_produces_integral_indices TypeInferLoops.ipairs_produces_integral_indices
TypeInferLoops.iteration_regression_issue_69967_alt TypeInferLoops.iteration_regression_issue_69967_alt
TypeInferLoops.loop_iter_basic TypeInferLoops.loop_iter_basic
TypeInferLoops.loop_iter_metamethod_nil TypeInferLoops.loop_iter_metamethod_nil
TypeInferLoops.loop_iter_metamethod_ok
TypeInferLoops.loop_iter_metamethod_ok_with_inference TypeInferLoops.loop_iter_metamethod_ok_with_inference
TypeInferLoops.loop_iter_trailing_nil TypeInferLoops.loop_iter_trailing_nil
TypeInferLoops.loop_typecheck_crash_on_empty_optional
TypeInferLoops.properly_infer_iteratee_is_a_free_table
TypeInferLoops.unreachable_code_after_infinite_loop TypeInferLoops.unreachable_code_after_infinite_loop
TypeInferLoops.varlist_declared_by_for_in_loop_should_be_free TypeInferLoops.varlist_declared_by_for_in_loop_should_be_free
TypeInferModules.bound_free_table_export_is_ok
TypeInferModules.do_not_modify_imported_types_5 TypeInferModules.do_not_modify_imported_types_5
TypeInferModules.module_type_conflict TypeInferModules.module_type_conflict
TypeInferModules.module_type_conflict_instantiated TypeInferModules.module_type_conflict_instantiated
TypeInferModules.require_failed_module
TypeInferOOP.cycle_between_object_constructor_and_alias TypeInferOOP.cycle_between_object_constructor_and_alias
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_it_wont_help_2 TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_it_wont_help_2
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_not_defined_with_colon TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_not_defined_with_colon
TypeInferOOP.inferring_hundreds_of_self_calls_should_not_suffocate_memory TypeInferOOP.inferring_hundreds_of_self_calls_should_not_suffocate_memory
TypeInferOOP.methods_are_topologically_sorted TypeInferOOP.methods_are_topologically_sorted
TypeInferOOP.object_constructor_can_refer_to_method_of_self
TypeInferOOP.promise_type_error_too_complex
TypeInferOOP.react_style_oo
TypeInferOOP.table_oop
TypeInferOperators.add_type_family_works
TypeInferOperators.and_binexps_dont_unify TypeInferOperators.and_binexps_dont_unify
TypeInferOperators.cli_38355_recursive_union TypeInferOperators.cli_38355_recursive_union
TypeInferOperators.compound_assign_result_must_be_compatible_with_var
TypeInferOperators.concat_op_on_free_lhs_and_string_rhs
TypeInferOperators.concat_op_on_string_lhs_and_free_rhs TypeInferOperators.concat_op_on_string_lhs_and_free_rhs
TypeInferOperators.disallow_string_and_types_without_metatables_from_arithmetic_binary_ops TypeInferOperators.disallow_string_and_types_without_metatables_from_arithmetic_binary_ops
TypeInferOperators.luau_polyfill_is_array TypeInferOperators.luau_polyfill_is_array
TypeInferOperators.operator_eq_completely_incompatible TypeInferOperators.operator_eq_completely_incompatible
TypeInferOperators.strict_binary_op_where_lhs_unknown
TypeInferOperators.typecheck_overloaded_multiply_that_is_an_intersection TypeInferOperators.typecheck_overloaded_multiply_that_is_an_intersection
TypeInferOperators.typecheck_overloaded_multiply_that_is_an_intersection_on_rhs TypeInferOperators.typecheck_overloaded_multiply_that_is_an_intersection_on_rhs
TypeInferOperators.typecheck_unary_len_error TypeInferOperators.typecheck_unary_len_error
@ -313,22 +370,18 @@ TypeInferPrimitives.CheckMethodsOfNumber
TypeInferPrimitives.string_index TypeInferPrimitives.string_index
TypeInferUnknownNever.length_of_never TypeInferUnknownNever.length_of_never
TypeInferUnknownNever.math_operators_and_never TypeInferUnknownNever.math_operators_and_never
TypePackTests.detect_cyclic_typepacks2
TypePackTests.higher_order_function TypePackTests.higher_order_function
TypePackTests.pack_tail_unification_check TypePackTests.pack_tail_unification_check
TypePackTests.type_alias_backwards_compatible TypePackTests.type_alias_backwards_compatible
TypePackTests.type_alias_default_type_errors TypePackTests.type_alias_default_type_errors
TypePackTests.type_packs_with_tails_in_vararg_adjustment
TypeSingletons.function_args_infer_singletons TypeSingletons.function_args_infer_singletons
TypeSingletons.function_call_with_singletons TypeSingletons.function_call_with_singletons
TypeSingletons.function_call_with_singletons_mismatch TypeSingletons.function_call_with_singletons_mismatch
TypeSingletons.no_widening_from_callsites
TypeSingletons.overloaded_function_call_with_singletons_mismatch
TypeSingletons.return_type_of_f_is_not_widened TypeSingletons.return_type_of_f_is_not_widened
TypeSingletons.table_properties_type_error_escapes TypeSingletons.table_properties_type_error_escapes
TypeSingletons.widen_the_supertype_if_it_is_free_and_subtype_has_singleton TypeSingletons.widen_the_supertype_if_it_is_free_and_subtype_has_singleton
TypeSingletons.widening_happens_almost_everywhere TypeSingletons.widening_happens_almost_everywhere
UnionTypes.dont_allow_cyclic_unions_to_be_inferred
UnionTypes.generic_function_with_optional_arg
UnionTypes.index_on_a_union_type_with_missing_property UnionTypes.index_on_a_union_type_with_missing_property
UnionTypes.less_greedy_unification_with_union_types UnionTypes.less_greedy_unification_with_union_types
UnionTypes.table_union_write_indirect UnionTypes.table_union_write_indirect