Mirror of https://github.com/luau-lang/luau.git (synced 2024-12-12 13:00:38 +00:00)
Sync to upstream/release/619 (#1218)
# What's Changed

## New Type Solver

- Many fixes to crashes, assertions, and hangs
- Binary type family aliases now have a default parameter
- Added a debug check for unsolved types escaping the constraint solver
- Overloaded functions are no longer inferred
- Unification creates additional subtyping constraints for blocked types
- Attempt to guess the result type for type families that are too large to resolve timely

## Native Code Generation

- Fixed `IrCmd::CHECK_TRUTHY` lowering in a specific case
- Detailed compilation errors are now supported
- More work on the new allocator

---

# Internal Contributors

Co-authored-by: Aaron Weiss <aaronweiss@roblox.com>
Co-authored-by: Alexander McCord <amccord@roblox.com>
Co-authored-by: Andy Friesen <afriesen@roblox.com>
Co-authored-by: James McNellis <jmcnellis@roblox.com>
Co-authored-by: Lily Brown <lbrown@roblox.com>
Co-authored-by: Vyacheslav Egorov <vegorov@roblox.com>
Co-authored-by: Vighnesh Vijay <vvijay@roblox.com>
parent bac85116f6
commit 47ad768c69
59 changed files with 1853 additions and 626 deletions
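Much of the visible churn in the constraint generator hunks below follows one refactoring: instead of building a `TypeFamilyInstanceType` in the arena and queueing a `ReduceConstraint` by hand at every call site, the generator now routes that pattern through a new `createFamilyInstance` helper, which also records the instance so the solver can force a post-solve reduction once the main constraint queue drains. A condensed before/after sketch, pieced together from the hunks below rather than taken verbatim from any single one of them:

```cpp
// Before: repeated at each operator/refinement call site in ConstraintGenerator.cpp.
TypeId resultType = arena->addType(TypeFamilyInstanceType{
    NotNull{&kBuiltinTypeFamilies.addFamily},
    {leftType, rightType},
    {},
});
addConstraint(scope, binary->location, ReduceConstraint{resultType});

// After: one helper owns the whole pattern and remembers the instance for the
// post-solve family reduction pass added to ConstraintSolver::run().
TypeId ConstraintGenerator::createFamilyInstance(TypeFamilyInstanceType instance, const ScopePtr& scope, Location location)
{
    TypeId result = arena->addType(std::move(instance));
    addConstraint(scope, location, ReduceConstraint{result});
    familyInstances.push_back(result);
    return result;
}
```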
@@ -224,7 +224,6 @@ struct HasIndexerConstraint
 // If the table is a free or unsealed table, we augment it with a new indexer.
 struct SetIndexerConstraint
 {
-    TypeId resultType;
     TypeId subjectType;
     TypeId indexType;
     TypeId propType;

@@ -256,6 +255,20 @@ struct UnpackConstraint
     bool resultIsLValue = false;
 };
 
+// resultType ~ unpack sourceType
+//
+// The same as UnpackConstraint, but specialized for a pair of types as opposed to packs.
+struct Unpack1Constraint
+{
+    TypeId resultType;
+    TypeId sourceType;
+
+    // UnpackConstraint is sometimes used to resolve the types of assignments.
+    // When this is the case, any LocalTypes in resultPack can have their
+    // domains extended by the corresponding type from sourcePack.
+    bool resultIsLValue = false;
+};
+
 // resultType ~ T0 op T1 op ... op TN
 //
 // op is either union or intersection. If any of the input types are blocked,

@@ -290,8 +303,8 @@ struct ReducePackConstraint
 
 using ConstraintV = Variant<SubtypeConstraint, PackSubtypeConstraint, GeneralizationConstraint, InstantiationConstraint, IterableConstraint,
     NameConstraint, TypeAliasExpansionConstraint, FunctionCallConstraint, FunctionCheckConstraint, PrimitiveTypeConstraint, HasPropConstraint,
-    SetPropConstraint, HasIndexerConstraint, SetIndexerConstraint, SingletonOrTopTypeConstraint, UnpackConstraint, SetOpConstraint, ReduceConstraint, ReducePackConstraint,
-    EqualityConstraint>;
+    SetPropConstraint, HasIndexerConstraint, SetIndexerConstraint, SingletonOrTopTypeConstraint, UnpackConstraint, Unpack1Constraint,
+    SetOpConstraint, ReduceConstraint, ReducePackConstraint, EqualityConstraint>;
 
 struct Constraint
 {
@@ -95,6 +95,10 @@ struct ConstraintGenerator
     // will enqueue them during solving.
     std::vector<ConstraintPtr> unqueuedConstraints;
 
+    // Type family instances created by the generator. This is used to ensure
+    // that these instances are reduced fully by the solver.
+    std::vector<TypeId> familyInstances;
+
     // The private scope of type aliases for which the type parameters belong to.
     DenseHashMap<const AstStatTypeAlias*, ScopePtr> astTypeAliasDefiningScopes{nullptr};
 

@@ -254,16 +258,18 @@ private:
     Inference check(const ScopePtr& scope, AstExprTable* expr, std::optional<TypeId> expectedType);
     std::tuple<TypeId, TypeId, RefinementId> checkBinary(const ScopePtr& scope, AstExprBinary* binary, std::optional<TypeId> expectedType);
 
-    /**
-     * Generate constraints to assign assignedTy to the expression expr
-     * @returns the type of the expression. This may or may not be assignedTy itself.
-     */
-    std::optional<TypeId> checkLValue(const ScopePtr& scope, AstExpr* expr, TypeId assignedTy, bool transform);
-    std::optional<TypeId> checkLValue(const ScopePtr& scope, AstExprLocal* local, TypeId assignedTy, bool transform);
-    std::optional<TypeId> checkLValue(const ScopePtr& scope, AstExprGlobal* global, TypeId assignedTy);
-    std::optional<TypeId> checkLValue(const ScopePtr& scope, AstExprIndexName* indexName, TypeId assignedTy);
-    std::optional<TypeId> checkLValue(const ScopePtr& scope, AstExprIndexExpr* indexExpr, TypeId assignedTy);
-    TypeId updateProperty(const ScopePtr& scope, AstExpr* expr, TypeId assignedTy);
+    struct LValueBounds
+    {
+        std::optional<TypeId> annotationTy;
+        std::optional<TypeId> assignedTy;
+    };
+
+    LValueBounds checkLValue(const ScopePtr& scope, AstExpr* expr, bool transform);
+    LValueBounds checkLValue(const ScopePtr& scope, AstExprLocal* local, bool transform);
+    LValueBounds checkLValue(const ScopePtr& scope, AstExprGlobal* global);
+    LValueBounds checkLValue(const ScopePtr& scope, AstExprIndexName* indexName);
+    LValueBounds checkLValue(const ScopePtr& scope, AstExprIndexExpr* indexExpr);
+    LValueBounds updateProperty(const ScopePtr& scope, AstExpr* expr);
 
     struct FunctionSignature
     {

@@ -370,6 +376,8 @@ private:
      * yields a vector of size 1, with value: [number | string]
      */
     std::vector<std::optional<TypeId>> getExpectedCallTypesForFunctionOverloads(const TypeId fnType);
+
+    TypeId createFamilyInstance(TypeFamilyInstanceType instance, const ScopePtr& scope, Location location);
 };
 
 /** Borrow a vector of pointers from a vector of owning pointers to constraints.
@@ -25,6 +25,8 @@ enum class ValueContext;
 
 struct DcrLogger;
 
+class AstExpr;
+
 // TypeId, TypePackId, or Constraint*. It is impossible to know which, but we
 // never dereference this pointer.
 using BlockedConstraintId = Variant<TypeId, TypePackId, const Constraint*>;

@@ -73,6 +75,9 @@ struct ConstraintSolver
     // A constraint can be both blocked and unsolved, for instance.
     std::vector<NotNull<const Constraint>> unsolvedConstraints;
 
+    // This is a set of type families that need to be reduced after all constraints have been dispatched.
+    DenseHashSet<TypeId> familyInstances{nullptr};
+
     // A mapping of constraint pointer to how many things the constraint is
     // blocked on. Can be empty or 0 for constraints that are not blocked on
     // anything.

@@ -130,14 +135,23 @@ struct ConstraintSolver
     bool tryDispatch(const FunctionCheckConstraint& c, NotNull<const Constraint> constraint);
     bool tryDispatch(const PrimitiveTypeConstraint& c, NotNull<const Constraint> constraint);
     bool tryDispatch(const HasPropConstraint& c, NotNull<const Constraint> constraint);
-    bool tryDispatch(const SetPropConstraint& c, NotNull<const Constraint> constraint, bool force);
+    bool tryDispatch(const SetPropConstraint& c, NotNull<const Constraint> constraint);
 
     bool tryDispatchHasIndexer(int& recursionDepth, NotNull<const Constraint> constraint, TypeId subjectType, TypeId indexType, TypeId resultType);
     bool tryDispatch(const HasIndexerConstraint& c, NotNull<const Constraint> constraint);
+
+    /// (dispatched, found) where
+    /// - dispatched: this constraint can be considered having dispatched.
+    /// - found: true if adding an indexer for a particular type was allowed.
+    std::pair<bool, bool> tryDispatchSetIndexer(NotNull<const Constraint> constraint, TypeId subjectType, TypeId indexType, TypeId propType, bool expandFreeTypeBounds);
     bool tryDispatch(const SetIndexerConstraint& c, NotNull<const Constraint> constraint, bool force);
 
     bool tryDispatch(const SingletonOrTopTypeConstraint& c, NotNull<const Constraint> constraint);
+
+    bool tryDispatchUnpack1(NotNull<const Constraint> constraint, TypeId resultType, TypeId sourceType, bool resultIsLValue);
     bool tryDispatch(const UnpackConstraint& c, NotNull<const Constraint> constraint);
+    bool tryDispatch(const Unpack1Constraint& c, NotNull<const Constraint> constraint);
+
     bool tryDispatch(const SetOpConstraint& c, NotNull<const Constraint> constraint, bool force);
     bool tryDispatch(const ReduceConstraint& c, NotNull<const Constraint> constraint, bool force);
     bool tryDispatch(const ReducePackConstraint& c, NotNull<const Constraint> constraint, bool force);

@@ -151,10 +165,10 @@ struct ConstraintSolver
     bool tryDispatchIterableFunction(
         TypeId nextTy, TypeId tableTy, TypeId firstIndexTy, const IterableConstraint& c, NotNull<const Constraint> constraint, bool force);
 
-    std::pair<std::vector<TypeId>, std::optional<TypeId>> lookupTableProp(
-        TypeId subjectType, const std::string& propName, ValueContext context, bool inConditional = false, bool suppressSimplification = false);
-    std::pair<std::vector<TypeId>, std::optional<TypeId>> lookupTableProp(
-        TypeId subjectType, const std::string& propName, ValueContext context, bool inConditional, bool suppressSimplification, DenseHashSet<TypeId>& seen);
+    std::pair<std::vector<TypeId>, std::optional<TypeId>> lookupTableProp(NotNull<const Constraint> constraint, TypeId subjectType,
+        const std::string& propName, ValueContext context, bool inConditional = false, bool suppressSimplification = false);
+    std::pair<std::vector<TypeId>, std::optional<TypeId>> lookupTableProp(NotNull<const Constraint> constraint, TypeId subjectType,
+        const std::string& propName, ValueContext context, bool inConditional, bool suppressSimplification, DenseHashSet<TypeId>& seen);
 
     void block(NotNull<const Constraint> target, NotNull<const Constraint> constraint);
     /**

@@ -242,17 +256,6 @@ struct ConstraintSolver
      */
     bool hasUnresolvedConstraints(TypeId ty);
 
-    /**
-     * Creates a new Unifier and performs a single unification operation.
-     *
-     * @param subType the sub-type to unify.
-     * @param superType the super-type to unify.
-     * @returns true if the unification succeeded. False if the unification was
-     * too complex.
-     */
-    template <typename TID>
-    bool unify(NotNull<Scope> scope, Location location, TID subType, TID superType);
-
     /** Attempts to unify subTy with superTy. If doing so would require unifying
      * BlockedTypes, fail and block the constraint on those BlockedTypes.
      *
@@ -299,6 +299,7 @@ using MagicFunction = std::function<std::optional<WithPredicate<TypePackId>>(
 struct MagicFunctionCallContext
 {
     NotNull<struct ConstraintSolver> solver;
+    NotNull<const Constraint> constraint;
     const class AstExprCall* callSite;
     TypePackId arguments;
     TypePackId result;
@@ -13,6 +13,7 @@
 #include "Luau/TypeFwd.h"
 #include "Luau/VisitType.h"
 #include "Luau/NotNull.h"
+#include "TypeArena.h"
 
 namespace Luau
 {

@@ -42,11 +43,14 @@ struct TypeFamilyReductionGuesser
     DenseHashSet<TypeId> cyclicInstances{nullptr};
 
     // Utilities
+    NotNull<TypeArena> arena;
     NotNull<BuiltinTypes> builtins;
     NotNull<Normalizer> normalizer;
 
-    TypeFamilyReductionGuesser(NotNull<BuiltinTypes> builtins, NotNull<Normalizer> normalizer);
+    TypeFamilyReductionGuesser(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins, NotNull<Normalizer> normalizer);
 
+    std::optional<TypeId> guess(TypeId typ);
+    std::optional<TypePackId> guess(TypePackId typ);
     TypeFamilyReductionGuessResult guessTypeFamilyReductionForFunction(const AstExprFunction& expr, const FunctionType* ftv, TypeId retTy);
 
 private:
@@ -2,11 +2,12 @@
 
 #pragma once
 
+#include "Luau/Constraint.h"
 #include "Luau/DenseHash.h"
 #include "Luau/NotNull.h"
-#include "Luau/TypePairHash.h"
 #include "Luau/TypeCheckLimits.h"
 #include "Luau/TypeFwd.h"
+#include "Luau/TypePairHash.h"
 
 #include <optional>
 #include <vector>

@@ -46,6 +47,8 @@ struct Unifier2
     int recursionCount = 0;
     int recursionLimit = 0;
 
+    std::vector<ConstraintV> incompleteSubtypes;
+
     Unifier2(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope, NotNull<InternalErrorReporter> ice);
 
     /** Attempt to commit the subtype relation subTy <: superTy to the type

@@ -61,6 +64,7 @@ struct Unifier2
      * free TypePack to another and encounter an occurs check violation.
      */
     bool unify(TypeId subTy, TypeId superTy);
+    bool unifyFreeWithType(TypeId subTy, TypeId superTy);
     bool unify(const LocalType* subTy, TypeId superFn);
     bool unify(TypeId subTy, const FunctionType* superFn);
     bool unify(const UnionType* subUnion, TypeId superTy);
@@ -64,6 +64,9 @@ inline void unsee(DenseHashSet<void*>& seen, const void* tv)
 
 } // namespace visit_detail
 
+// recursion counter is equivalent here, but we'd like a better name to express the intent.
+using TypeFamilyDepthCounter = RecursionCounter;
+
 template<typename S>
 struct GenericTypeVisitor
 {

@@ -72,6 +75,7 @@ struct GenericTypeVisitor
     Set seen;
     bool skipBoundTypes = false;
     int recursionCounter = 0;
+    int typeFamilyDepth = 0;
 
     GenericTypeVisitor() = default;
 

@@ -400,6 +404,8 @@ struct GenericTypeVisitor
         }
         else if (auto tfit = get<TypeFamilyInstanceType>(ty))
        {
+            TypeFamilyDepthCounter tfdc{&typeFamilyDepth};
+
             if (visit(ty, *tfit))
             {
                 for (TypeId p : tfit->typeArguments)

@@ -460,6 +466,8 @@ struct GenericTypeVisitor
             visit(tp, *btp);
         else if (auto tfitp = get<TypeFamilyInstanceTypePack>(tp))
         {
+            TypeFamilyDepthCounter tfdc{&typeFamilyDepth};
+
             if (visit(tp, *tfitp))
             {
                 for (TypeId t : tfitp->typeArguments)
@@ -431,7 +431,7 @@ static bool dcrMagicFunctionFormat(MagicFunctionCallContext context)
     // unify the prefix one argument at a time
     for (size_t i = 0; i < expected.size() && i + paramOffset < params.size(); ++i)
     {
-        context.solver->unify(context.solver->rootScope, context.callSite->location, params[i + paramOffset], expected[i]);
+        context.solver->unify(context.constraint, params[i + paramOffset], expected[i]);
     }
 
     // if we know the argument count or if we have too many arguments for sure, we can issue an error

@@ -561,7 +561,7 @@ static bool dcrMagicFunctionGmatch(MagicFunctionCallContext context)
     if (returnTypes.empty())
         return false;
 
-    context.solver->unify(context.solver->rootScope, context.callSite->location, params[0], context.solver->builtinTypes->stringType);
+    context.solver->unify(context.constraint, params[0], context.solver->builtinTypes->stringType);
 
     const TypePackId emptyPack = arena->addTypePack({});
     const TypePackId returnList = arena->addTypePack(returnTypes);

@@ -630,13 +630,13 @@ static bool dcrMagicFunctionMatch(MagicFunctionCallContext context)
     if (returnTypes.empty())
         return false;
 
-    context.solver->unify(context.solver->rootScope, context.callSite->location, params[0], context.solver->builtinTypes->stringType);
+    context.solver->unify(context.constraint, params[0], context.solver->builtinTypes->stringType);
 
     const TypeId optionalNumber = arena->addType(UnionType{{context.solver->builtinTypes->nilType, context.solver->builtinTypes->numberType}});
 
     size_t initIndex = context.callSite->self ? 1 : 2;
     if (params.size() == 3 && context.callSite->args.size > initIndex)
-        context.solver->unify(context.solver->rootScope, context.callSite->location, params[2], optionalNumber);
+        context.solver->unify(context.constraint, params[2], optionalNumber);
 
     const TypePackId returnList = arena->addTypePack(returnTypes);
     asMutable(context.result)->ty.emplace<BoundTypePack>(returnList);

@@ -733,17 +733,17 @@ static bool dcrMagicFunctionFind(MagicFunctionCallContext context)
         return false;
     }
 
-    context.solver->unify(context.solver->rootScope, context.callSite->location, params[0], builtinTypes->stringType);
+    context.solver->unify(context.constraint, params[0], builtinTypes->stringType);
 
     const TypeId optionalNumber = arena->addType(UnionType{{builtinTypes->nilType, builtinTypes->numberType}});
     const TypeId optionalBoolean = arena->addType(UnionType{{builtinTypes->nilType, builtinTypes->booleanType}});
 
     size_t initIndex = context.callSite->self ? 1 : 2;
     if (params.size() >= 3 && context.callSite->args.size > initIndex)
-        context.solver->unify(context.solver->rootScope, context.callSite->location, params[2], optionalNumber);
+        context.solver->unify(context.constraint, params[2], optionalNumber);
 
     if (params.size() == 4 && context.callSite->args.size > plainIndex)
-        context.solver->unify(context.solver->rootScope, context.callSite->location, params[3], optionalBoolean);
+        context.solver->unify(context.constraint, params[3], optionalBoolean);
 
     returnTypes.insert(returnTypes.begin(), {optionalNumber, optionalNumber});
 
@@ -525,12 +525,11 @@ void ConstraintGenerator::applyRefinements(const ScopePtr& scope, Location locat
         {
             if (mustDeferIntersection(ty) || mustDeferIntersection(dt))
             {
-                TypeId resultType = arena->addType(TypeFamilyInstanceType{
+                TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
                     NotNull{&kBuiltinTypeFamilies.refineFamily},
                     {ty, dt},
                     {},
-                });
-                addConstraint(scope, location, ReduceConstraint{resultType});
+                }, scope, location);
 
                 ty = resultType;
             }

@@ -1005,8 +1004,7 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatFunction* f
     else if (AstExprIndexName* indexName = function->name->as<AstExprIndexName>())
     {
         Checkpoint check1 = checkpoint(this);
-        std::optional<TypeId> lvalueType = checkLValue(scope, indexName, generalizedType);
-        LUAU_ASSERT(lvalueType);
+        auto [_, lvalueType] = checkLValue(scope, indexName);
         Checkpoint check2 = checkpoint(this);
 
         forEachConstraint(check1, check2, this, [&excludeList](const ConstraintPtr& c) {

@@ -1017,7 +1015,8 @@
 
         if (lvalueType && *lvalueType != generalizedType)
         {
-            addConstraint(scope, indexName->location, SubtypeConstraint{*lvalueType, generalizedType});
+            LUAU_ASSERT(get<BlockedType>(lvalueType));
+            emplaceType<BoundType>(asMutable(*lvalueType), generalizedType);
         }
     }
     else if (AstExprError* err = function->name->as<AstExprError>())

@@ -1110,14 +1109,17 @@ static void bindFreeType(TypeId a, TypeId b)
 
 ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatAssign* assign)
 {
-    std::vector<TypeId> assignees;
-    assignees.reserve(assign->vars.size);
+    std::vector<TypeId> upperBounds;
+    upperBounds.reserve(assign->vars.size);
 
+    std::vector<TypeId> typeStates;
+    typeStates.reserve(assign->vars.size);
+
+    Checkpoint lvalueBeginCheckpoint = checkpoint(this);
+
     size_t i = 0;
     for (AstExpr* lvalue : assign->vars)
     {
-        TypeId assignee = arena->addType(BlockedType{});
-
         // This is a really weird thing to do, but it's critically important for some kinds of
         // assignments with the current type state behavior. Consider this code:
         // local function f(l, r)

@@ -1158,18 +1160,28 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatAssign* ass
             }
         }
 
-        checkLValue(scope, lvalue, assignee, transform);
-        assignees.push_back(assignee);
+        auto [upperBound, typeState] = checkLValue(scope, lvalue, transform);
+        upperBounds.push_back(upperBound.value_or(builtinTypes->unknownType));
+        typeStates.push_back(typeState.value_or(builtinTypes->unknownType));
         ++i;
     }
 
+    Checkpoint lvalueEndCheckpoint = checkpoint(this);
+
     TypePackId resultPack = checkPack(scope, assign->values).tp;
-    auto c = addConstraint(scope, assign->location, UnpackConstraint{arena->addTypePack(assignees), resultPack, /*resultIsLValue*/ true});
-    for (TypeId assignee : assignees)
+    auto uc = addConstraint(scope, assign->location, UnpackConstraint{arena->addTypePack(typeStates), resultPack, /*resultIsLValue*/ true});
+    forEachConstraint(lvalueBeginCheckpoint, lvalueEndCheckpoint, this, [uc](const ConstraintPtr& constraint) {
+        uc->dependencies.push_back(NotNull{constraint.get()});
+    });
+
+    auto psc = addConstraint(scope, assign->location, PackSubtypeConstraint{resultPack, arena->addTypePack(std::move(upperBounds))});
+    psc->dependencies.push_back(uc);
+
+    for (TypeId assignee : typeStates)
     {
         auto blocked = getMutable<BlockedType>(assignee);
-        LUAU_ASSERT(blocked);
-        blocked->setOwner(c);
+        if (blocked && !blocked->getOwner())
+            blocked->setOwner(uc);
     }
 
     return ControlFlow::None;

@@ -1180,7 +1192,21 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatCompoundAss
     AstExprBinary binop = AstExprBinary{assign->location, assign->op, assign->var, assign->value};
     TypeId resultTy = check(scope, &binop).ty;
 
-    checkLValue(scope, assign->var, resultTy, true);
+    auto [upperBound, typeState] = checkLValue(scope, assign->var, true);
+
+    Constraint* sc = nullptr;
+    if (upperBound)
+        sc = addConstraint(scope, assign->location, SubtypeConstraint{resultTy, *upperBound});
+
+    if (typeState)
+    {
+        NotNull<Constraint> uc = addConstraint(scope, assign->location, Unpack1Constraint{*typeState, resultTy, /*resultIsLValue=*/ true});
+        if (auto blocked = getMutable<BlockedType>(*typeState); blocked && !blocked->getOwner())
+            blocked->setOwner(uc);
+
+        if (sc)
+            uc->dependencies.push_back(NotNull{sc});
+    }
 
     DefId def = dfg->getDef(assign->var);
     scope->lvalueTypes[def] = resultTy;
@@ -2024,32 +2050,29 @@ Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprUnary* unary)
     {
     case AstExprUnary::Op::Not:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.notFamily},
             {operandType},
             {},
-        });
-        addConstraint(scope, unary->location, ReduceConstraint{resultType});
+        }, scope, unary->location);
         return Inference{resultType, refinementArena.negation(refinement)};
     }
     case AstExprUnary::Op::Len:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.lenFamily},
             {operandType},
             {},
-        });
-        addConstraint(scope, unary->location, ReduceConstraint{resultType});
+        }, scope, unary->location);
         return Inference{resultType, refinementArena.negation(refinement)};
     }
     case AstExprUnary::Op::Minus:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.unmFamily},
             {operandType},
             {},
-        });
-        addConstraint(scope, unary->location, ReduceConstraint{resultType});
+        }, scope, unary->location);
         return Inference{resultType, refinementArena.negation(refinement)};
     }
     default: // msvc can't prove that this is exhaustive.
@@ -2065,153 +2088,138 @@ Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprBinary* binar
     {
     case AstExprBinary::Op::Add:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.addFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::Sub:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.subFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
        return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::Mul:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.mulFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::Div:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.divFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::FloorDiv:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.idivFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::Pow:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.powFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::Mod:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.modFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::Concat:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.concatFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::And:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.andFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::Or:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.orFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::CompareLt:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.ltFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::CompareGe:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.ltFamily},
             {rightType, leftType}, // lua decided that `__ge(a, b)` is instead just `__lt(b, a)`
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::CompareLe:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.leFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::CompareGt:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.leFamily},
             {rightType, leftType}, // lua decided that `__gt(a, b)` is instead just `__le(b, a)`
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::CompareEq:
     case AstExprBinary::Op::CompareNe:
     {
-        TypeId resultType = arena->addType(TypeFamilyInstanceType{
+        TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
             NotNull{&kBuiltinTypeFamilies.eqFamily},
             {leftType, rightType},
             {},
-        });
-        addConstraint(scope, binary->location, ReduceConstraint{resultType});
+        }, scope, binary->location);
         return Inference{resultType, std::move(refinement)};
     }
     case AstExprBinary::Op::Op__Count:
@@ -2365,31 +2373,29 @@ std::tuple<TypeId, TypeId, RefinementId> ConstraintGenerator::checkBinary(
     }
 }
 
-std::optional<TypeId> ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExpr* expr, TypeId assignedTy, bool transform)
+ConstraintGenerator::LValueBounds ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExpr* expr, bool transform)
 {
     if (auto local = expr->as<AstExprLocal>())
-        return checkLValue(scope, local, assignedTy, transform);
+        return checkLValue(scope, local, transform);
     else if (auto global = expr->as<AstExprGlobal>())
-        return checkLValue(scope, global, assignedTy);
+        return checkLValue(scope, global);
     else if (auto indexName = expr->as<AstExprIndexName>())
-        return checkLValue(scope, indexName, assignedTy);
+        return checkLValue(scope, indexName);
     else if (auto indexExpr = expr->as<AstExprIndexExpr>())
-        return checkLValue(scope, indexExpr, assignedTy);
+        return checkLValue(scope, indexExpr);
     else if (auto error = expr->as<AstExprError>())
     {
         check(scope, error);
-        return builtinTypes->errorRecoveryType();
+        return {builtinTypes->errorRecoveryType(), builtinTypes->errorRecoveryType()};
     }
     else
         ice->ice("checkLValue is inexhaustive");
 }
 
-std::optional<TypeId> ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExprLocal* local, TypeId assignedTy, bool transform)
+ConstraintGenerator::LValueBounds ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExprLocal* local, bool transform)
 {
     std::optional<TypeId> annotatedTy = scope->lookup(local->local);
     LUAU_ASSERT(annotatedTy);
-    if (annotatedTy)
-        addConstraint(scope, local->location, SubtypeConstraint{assignedTy, *annotatedTy});
 
     const DefId defId = dfg->getDef(local);
     std::optional<TypeId> ty = scope->lookupUnrefinedType(defId);

@@ -2430,41 +2436,48 @@
         scope->lvalueTypes[defId] = *ty;
     }
 
+    // TODO: Need to clip this, but this requires more code to be reworked first before we can clip this.
+    std::optional<TypeId> assignedTy;
+
     if (transform)
     {
-        Constraint* owner = nullptr;
-        if (auto blocked = get<BlockedType>(*ty))
-            owner = blocked->getOwner();
+        assignedTy = arena->addType(BlockedType{});
 
         auto unpackC = addConstraint(scope, local->location,
-            UnpackConstraint{arena->addTypePack({*ty}), arena->addTypePack({assignedTy}),
+            UnpackConstraint{arena->addTypePack({*ty}), arena->addTypePack({*assignedTy}),
                 /*resultIsLValue*/ true});
 
-        if (owner)
-            unpackC->dependencies.push_back(NotNull{owner});
-        else if (auto blocked = getMutable<BlockedType>(*ty))
-            blocked->setOwner(unpackC);
+        if (auto blocked = get<BlockedType>(*ty))
+        {
+            if (blocked->getOwner())
+                unpackC->dependencies.push_back(NotNull{blocked->getOwner()});
+            else if (auto blocked = getMutable<BlockedType>(*ty))
+                blocked->setOwner(unpackC);
+        }
 
         recordInferredBinding(local->local, *ty);
     }
 
-    return ty;
+    return {annotatedTy, assignedTy};
 }
 
-std::optional<TypeId> ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExprGlobal* global, TypeId assignedTy)
+ConstraintGenerator::LValueBounds ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExprGlobal* global)
 {
-    return scope->lookup(Symbol{global->name});
+    std::optional<TypeId> annotatedTy = scope->lookup(Symbol{global->name});
+    if (annotatedTy)
+        return {annotatedTy, arena->addType(BlockedType{})};
+    else
+        return {annotatedTy, std::nullopt};
 }
 
-std::optional<TypeId> ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExprIndexName* indexName, TypeId assignedTy)
+ConstraintGenerator::LValueBounds ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExprIndexName* indexName)
 {
-    return updateProperty(scope, indexName, assignedTy);
+    return updateProperty(scope, indexName);
 }
 
-std::optional<TypeId> ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExprIndexExpr* indexExpr, TypeId assignedTy)
+ConstraintGenerator::LValueBounds ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExprIndexExpr* indexExpr)
 {
-    return updateProperty(scope, indexExpr, assignedTy);
+    return updateProperty(scope, indexExpr);
 }
 
 /**

@@ -2472,15 +2485,14 @@ std::optional<TypeId> ConstraintGenerator::checkLValue(const ScopePtr& scope, As
  *
  * If expr has the form name.a.b.c
  */
-TypeId ConstraintGenerator::updateProperty(const ScopePtr& scope, AstExpr* expr, TypeId assignedTy)
+ConstraintGenerator::LValueBounds ConstraintGenerator::updateProperty(const ScopePtr& scope, AstExpr* expr)
 {
     // There are a bunch of cases where we realize that this is not the kind of
     // assignment that potentially changes the shape of a table. When we
     // encounter them, we call this to fall back and do the "usual thing."
-    auto fallback = [&]() {
+    auto fallback = [&]() -> LValueBounds {
         TypeId resTy = check(scope, expr).ty;
-        addConstraint(scope, expr->location, SubtypeConstraint{assignedTy, resTy});
-        return resTy;
+        return {resTy, std::nullopt};
     };
 
     LUAU_ASSERT(expr->is<AstExprIndexName>() || expr->is<AstExprIndexExpr>());

@@ -2499,15 +2511,14 @@ TypeId ConstraintGenerator::updateProperty(const ScopePtr& scope, AstExpr* expr,
         // a.b.c[1] = 44 -- lvalue
         // a.b[4].c = 2 -- rvalue
 
-        TypeId resultType = arena->addType(BlockedType{});
         TypeId subjectType = check(scope, indexExpr->expr).ty;
         TypeId indexType = check(scope, indexExpr->index).ty;
-        auto c = addConstraint(scope, expr->location, SetIndexerConstraint{resultType, subjectType, indexType, assignedTy});
-        getMutable<BlockedType>(resultType)->setOwner(c);
+        TypeId assignedTy = arena->addType(BlockedType{});
+        addConstraint(scope, expr->location, SetIndexerConstraint{subjectType, indexType, assignedTy});
 
         module->astTypes[expr] = assignedTy;
 
-        return assignedTy;
+        return {assignedTy, assignedTy};
     }
 
     Symbol sym;

@@ -2573,6 +2584,7 @@ TypeId ConstraintGenerator::updateProperty(const ScopePtr& scope, AstExpr* expr,
     std::vector<std::string> segmentStrings(begin(segments), end(segments));
 
     TypeId updatedType = arena->addType(BlockedType{});
+    TypeId assignedTy = arena->addType(BlockedType{});
     auto setC = addConstraint(scope, expr->location, SetPropConstraint{updatedType, subjectType, std::move(segmentStrings), assignedTy});
     getMutable<BlockedType>(updatedType)->setOwner(setC);
 

@@ -2604,7 +2616,7 @@ TypeId ConstraintGenerator::updateProperty(const ScopePtr& scope, AstExpr* expr,
         }
     }
 
-    return assignedTy;
+    return {assignedTy, assignedTy};
 }
 
 Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprTable* expr, std::optional<TypeId> expectedType)

@@ -3287,24 +3299,22 @@ void ConstraintGenerator::reportCodeTooComplex(Location location)
 
 TypeId ConstraintGenerator::makeUnion(const ScopePtr& scope, Location location, TypeId lhs, TypeId rhs)
 {
-    TypeId resultType = arena->addType(TypeFamilyInstanceType{
+    TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
         NotNull{&kBuiltinTypeFamilies.unionFamily},
         {lhs, rhs},
         {},
-    });
-    addConstraint(scope, location, ReduceConstraint{resultType});
+    }, scope, location);
 
     return resultType;
 }
 
 TypeId ConstraintGenerator::makeIntersect(const ScopePtr& scope, Location location, TypeId lhs, TypeId rhs)
 {
-    TypeId resultType = arena->addType(TypeFamilyInstanceType{
+    TypeId resultType = createFamilyInstance(TypeFamilyInstanceType{
         NotNull{&kBuiltinTypeFamilies.intersectFamily},
         {lhs, rhs},
         {},
-    });
-    addConstraint(scope, location, ReduceConstraint{resultType});
+    }, scope, location);
 
     return resultType;
 }

@@ -3454,6 +3464,14 @@ std::vector<std::optional<TypeId>> ConstraintGenerator::getExpectedCallTypesForF
     return expectedTypes;
 }
 
+TypeId ConstraintGenerator::createFamilyInstance(TypeFamilyInstanceType instance, const ScopePtr& scope, Location location)
+{
+    TypeId result = arena->addType(std::move(instance));
+    addConstraint(scope, location, ReduceConstraint{result});
+    familyInstances.push_back(result);
+    return result;
+}
+
 std::vector<NotNull<Constraint>> borrowConstraints(const std::vector<ConstraintPtr>& constraints)
 {
     std::vector<NotNull<Constraint>> result;
@ -273,9 +273,10 @@ struct InstantiationQueuer : TypeOnceVisitor
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool visit(TypeId ty, const TypeFamilyInstanceType& tfit) override
|
bool visit(TypeId ty, const TypeFamilyInstanceType&) override
|
||||||
{
|
{
|
||||||
solver->pushConstraint(scope, location, ReduceConstraint{ty});
|
solver->pushConstraint(scope, location, ReduceConstraint{ty});
|
||||||
|
solver->familyInstances.insert(ty);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -454,6 +455,16 @@ void ConstraintSolver::run()
|
||||||
progress |= runSolverPass(true);
|
progress |= runSolverPass(true);
|
||||||
} while (progress);
|
} while (progress);
|
||||||
|
|
||||||
|
for (TypeId instance : familyInstances)
|
||||||
|
{
|
||||||
|
if (FFlag::DebugLuauLogSolver)
|
||||||
|
printf("Post-solve family reduction of %s\n", toString(instance).c_str());
|
||||||
|
|
||||||
|
TypeCheckLimits limits{};
|
||||||
|
FamilyGraphReductionResult result =
|
||||||
|
reduceFamilies(instance, Location{}, TypeFamilyContext{arena, builtinTypes, rootScope, normalizer, NotNull{&iceReporter}, NotNull{&limits}}, false);
|
||||||
|
}
|
||||||
|
|
||||||
if (FFlag::DebugLuauLogSolver)
|
if (FFlag::DebugLuauLogSolver)
|
||||||
{
|
{
|
||||||
dumpBindings(rootScope, opts);
|
dumpBindings(rootScope, opts);
|
||||||
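For reference, the new post-solve pass boils down to the following per-instance call; this is a sketch of how `reduceFamilies` is driven for one leftover instance, assuming the same solver members used above (`arena`, `builtinTypes`, `rootScope`, `normalizer`, `iceReporter`).

```cpp
// Sketch: reduce a single family instance that survived the main solver loop.
// A default-constructed TypeCheckLimits leaves only the global step budget in
// effect, and force=false (as in the loop above) does not insist that the
// instance actually reduce.
TypeCheckLimits limits{};
FamilyGraphReductionResult r = reduceFamilies(
    instance, Location{},
    TypeFamilyContext{arena, builtinTypes, rootScope, normalizer, NotNull{&iceReporter}, NotNull{&limits}},
    /*force*/ false);
```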
@@ -479,29 +490,6 @@ struct TypeAndLocation
     Location location;
 };

-struct FreeTypeSearcher : TypeOnceVisitor
-{
-    VecDeque<TypeAndLocation>* result;
-    Location location;
-
-    FreeTypeSearcher(VecDeque<TypeAndLocation>* result, Location location)
-        : result(result)
-        , location(location)
-    {
-    }
-
-    bool visit(TypeId ty, const FreeType&) override
-    {
-        result->push_back({ty, location});
-        return false;
-    }
-
-    bool visit(TypeId, const ClassType&) override
-    {
-        return false;
-    }
-};
-
 } // namespace

 bool ConstraintSolver::tryDispatch(NotNull<const Constraint> constraint, bool force)
@@ -534,7 +522,7 @@ bool ConstraintSolver::tryDispatch(NotNull<const Constraint> constraint, bool fo
     else if (auto hpc = get<HasPropConstraint>(*constraint))
         success = tryDispatch(*hpc, constraint);
     else if (auto spc = get<SetPropConstraint>(*constraint))
-        success = tryDispatch(*spc, constraint, force);
+        success = tryDispatch(*spc, constraint);
     else if (auto spc = get<HasIndexerConstraint>(*constraint))
         success = tryDispatch(*spc, constraint);
     else if (auto spc = get<SetIndexerConstraint>(*constraint))
@@ -543,6 +531,8 @@ bool ConstraintSolver::tryDispatch(NotNull<const Constraint> constraint, bool fo
         success = tryDispatch(*sottc, constraint);
     else if (auto uc = get<UnpackConstraint>(*constraint))
         success = tryDispatch(*uc, constraint);
+    else if (auto uc = get<Unpack1Constraint>(*constraint))
+        success = tryDispatch(*uc, constraint);
     else if (auto soc = get<SetOpConstraint>(*constraint))
         success = tryDispatch(*soc, constraint, force);
     else if (auto rc = get<ReduceConstraint>(*constraint))
@@ -626,7 +616,10 @@ bool ConstraintSolver::tryDispatch(const GeneralizationConstraint& c, NotNull<co
     unblock(c.sourceType, constraint->location);

     for (TypeId ty : c.interiorTypes)
+    {
         u2.generalize(ty);
+        unblock(ty, constraint->location);
+    }

     return true;
 }
@@ -1075,7 +1068,7 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
     if (ftv)
     {
         if (ftv->dcrMagicFunction)
-            usedMagic = ftv->dcrMagicFunction(MagicFunctionCallContext{NotNull(this), c.callSite, c.argsPack, result});
+            usedMagic = ftv->dcrMagicFunction(MagicFunctionCallContext{NotNull{this}, constraint, c.callSite, c.argsPack, result});

         if (ftv->dcrMagicRefinement)
             ftv->dcrMagicRefinement(MagicRefinementContext{constraint->scope, c.callSite, c.discriminantTypes});
@@ -1292,7 +1285,7 @@ bool ConstraintSolver::tryDispatch(const HasPropConstraint& c, NotNull<const Con
     if (isBlocked(subjectType) || get<PendingExpansionType>(subjectType) || get<TypeFamilyInstanceType>(subjectType))
         return block(subjectType, constraint);

-    auto [blocked, result] = lookupTableProp(subjectType, c.prop, c.context, c.inConditional, c.suppressSimplification);
+    auto [blocked, result] = lookupTableProp(constraint, subjectType, c.prop, c.context, c.inConditional, c.suppressSimplification);
     if (!blocked.empty())
     {
         for (TypeId blocked : blocked)
@@ -1381,7 +1374,7 @@ static void updateTheTableType(
     tt->props[lastSegment].setType(replaceTy);
 }

-bool ConstraintSolver::tryDispatch(const SetPropConstraint& c, NotNull<const Constraint> constraint, bool force)
+bool ConstraintSolver::tryDispatch(const SetPropConstraint& c, NotNull<const Constraint> constraint)
 {
     TypeId subjectType = follow(c.subjectType);
     const TypeId propType = follow(c.propType);
@@ -1403,7 +1396,7 @@ bool ConstraintSolver::tryDispatch(const SetPropConstraint& c, NotNull<const Con

         ValueContext ctx = i == c.path.size() - 1 ? ValueContext::LValue : ValueContext::RValue;

-        auto [blocked, result] = lookupTableProp(*existingPropType, segment, ctx);
+        auto [blocked, result] = lookupTableProp(constraint, *existingPropType, segment, ctx);
         if (!blocked.empty())
         {
             for (TypeId blocked : blocked)
@@ -1420,8 +1413,8 @@ bool ConstraintSolver::tryDispatch(const SetPropConstraint& c, NotNull<const Con

     if (existingPropType)
     {
-        unify(constraint->scope, constraint->location, propType, *existingPropType);
-        unify(constraint->scope, constraint->location, *existingPropType, propType);
+        unify(constraint, propType, *existingPropType);
+        unify(constraint, *existingPropType, propType);
         bind(c.resultType, c.subjectType);
         unblock(c.resultType, constraint->location);
         return true;
@@ -1640,63 +1633,79 @@ bool ConstraintSolver::tryDispatch(const HasIndexerConstraint& c, NotNull<const
     return tryDispatchHasIndexer(recursionDepth, constraint, subjectType, indexType, c.resultType);
 }

+std::pair<bool, bool> ConstraintSolver::tryDispatchSetIndexer(NotNull<const Constraint> constraint, TypeId subjectType, TypeId indexType, TypeId propType, bool expandFreeTypeBounds)
+{
+    if (isBlocked(subjectType))
+        return {block(subjectType, constraint), false};
+
+    if (auto tt = getMutable<TableType>(subjectType))
+    {
+        if (tt->indexer)
+        {
+            unify(constraint, indexType, tt->indexer->indexType);
+
+            // We have a `BoundType` check here because we must mutate only our owning `BlockedType`, not some other constraint's `BlockedType`.
+            // TODO: We should rather have a `bool mutateProp` parameter that is set to false if we're traversing a union or intersection type.
+            // The union or intersection type themselves should be the one to mutate the `propType`, not each or first `TableType` in a union/intersection type.
+            //
+            // Fixing this requires fixing other ones first.
+            if (!get<BoundType>(propType) && get<BlockedType>(propType))
+                emplaceType<BoundType>(asMutable(propType), tt->indexer->indexResultType);
+
+            return {true, true};
+        }
+        else if (tt->state == TableState::Free || tt->state == TableState::Unsealed)
+        {
+            tt->indexer = TableIndexer{indexType, propType};
+            return {true, true};
+        }
+        else
+            return {true, false};
+    }
+    else if (auto ft = getMutable<FreeType>(subjectType); ft && expandFreeTypeBounds)
+    {
+        // Setting an indexer on some fresh type means we use that fresh type in a negative position.
+        // Therefore, we only care about the upper bound.
+        //
+        // We'll extend the upper bound if we could dispatch, but could not find a table type to update the indexer.
+        auto [dispatched, found] = tryDispatchSetIndexer(constraint, ft->upperBound, indexType, propType, /*expandFreeTypeBounds=*/ false);
+        if (dispatched && !found)
+        {
+            TypeId tableTy = arena->addType(TableType{TableState::Sealed, TypeLevel{}, constraint->scope.get()});
+            TableType* tt2 = getMutable<TableType>(tableTy);
+            tt2->indexer = TableIndexer{indexType, propType};
+
+            ft->upperBound = simplifyIntersection(builtinTypes, arena, ft->upperBound, tableTy).result; // TODO: intersect type family or a constraint.
+        }
+
+        return {dispatched, found};
+    }
+    else if (auto it = get<IntersectionType>(subjectType))
+    {
+        std::pair<bool, bool> result{true, true};
+        for (TypeId part : it)
+        {
+            auto [dispatched, found] = tryDispatchSetIndexer(constraint, part, indexType, propType, expandFreeTypeBounds);
+            result.first &= dispatched;
+            result.second &= found;
+        }
+
+        return result;
+    }
+
+    return {true, false};
+}
+
 bool ConstraintSolver::tryDispatch(const SetIndexerConstraint& c, NotNull<const Constraint> constraint, bool force)
 {
     TypeId subjectType = follow(c.subjectType);
     if (isBlocked(subjectType))
         return block(subjectType, constraint);

-    if (auto ft = get<FreeType>(subjectType))
-    {
-        Scope* scope = ft->scope;
-        TableType* tt = &asMutable(subjectType)->ty.emplace<TableType>(TableState::Free, TypeLevel{}, scope);
-        tt->indexer = TableIndexer{c.indexType, c.propType};
-
-        asMutable(c.resultType)->ty.emplace<BoundType>(subjectType);
-        TypeId propType = freshType(arena, builtinTypes, scope);
-        asMutable(c.propType)->ty.emplace<BoundType>(propType);
+    auto [dispatched, found] = tryDispatchSetIndexer(constraint, subjectType, c.indexType, c.propType, /*expandFreeTypeBounds=*/ true);
+    if (dispatched && found)
         unblock(c.propType, constraint->location);
-        unblock(c.resultType, constraint->location);

-        return true;
-    }
-    else if (auto tt = get<TableType>(subjectType))
-    {
-        if (tt->indexer)
-        {
-            if (isBlocked(tt->indexer->indexResultType))
-                return block(tt->indexer->indexResultType, constraint);
-
-            // TODO This probably has to be invariant.
-            unify(constraint, c.indexType, tt->indexer->indexType);
-            asMutable(c.propType)->ty.emplace<BoundType>(tt->indexer->indexResultType);
-            asMutable(c.resultType)->ty.emplace<BoundType>(subjectType);
-            unblock(c.propType, constraint->location);
-            unblock(c.resultType, constraint->location);
-            return true;
-        }
-        else if (tt->state == TableState::Free || tt->state == TableState::Unsealed)
-        {
-            TypeId promotedIndexTy = freshType(arena, builtinTypes, tt->scope);
-            unify(constraint, c.indexType, promotedIndexTy);
-
-            auto mtt = getMutable<TableType>(subjectType);
-            mtt->indexer = TableIndexer{promotedIndexTy, c.propType};
-            TypeId propType = freshType(arena, builtinTypes, tt->scope);
-            asMutable(c.propType)->ty.emplace<BoundType>(propType);
-            asMutable(c.resultType)->ty.emplace<BoundType>(subjectType);
-            unblock(c.propType, constraint->location);
-            unblock(c.resultType, constraint->location);
-            return true;
-        }
-        // Do not augment sealed or generic tables that lack indexers
-    }
-
-    asMutable(c.propType)->ty.emplace<BoundType>(builtinTypes->errorRecoveryType());
-    asMutable(c.resultType)->ty.emplace<BoundType>(builtinTypes->errorRecoveryType());
-    unblock(c.propType, constraint->location);
-    unblock(c.resultType, constraint->location);
-    return true;
+    return dispatched;
 }

 bool ConstraintSolver::tryDispatch(const SingletonOrTopTypeConstraint& c, NotNull<const Constraint> constraint)
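A hedged sketch of how the `{dispatched, found}` pair returned by `tryDispatchSetIndexer` is meant to be read at a call site; it mirrors the dispatcher above rather than adding new behaviour.

```cpp
// Sketch of the contract:
//   dispatched == false  -> the subject was blocked; the constraint will be retried later.
//   dispatched && found  -> a table took the indexer (propType was bound or installed as the result type).
//   dispatched && !found -> nothing could accept the indexer (e.g. a sealed table); the constraint is still done.
auto [dispatched, found] = tryDispatchSetIndexer(constraint, subjectType, indexType, propType, /*expandFreeTypeBounds=*/ true);
if (dispatched && found)
    unblock(propType, constraint->location);
return dispatched;
```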
@@ -1719,6 +1728,46 @@ bool ConstraintSolver::tryDispatch(const SingletonOrTopTypeConstraint& c, NotNul
     return true;
 }

+bool ConstraintSolver::tryDispatchUnpack1(NotNull<const Constraint> constraint, TypeId resultTy, TypeId srcTy, bool resultIsLValue)
+{
+    resultTy = follow(resultTy);
+    LUAU_ASSERT(canMutate(resultTy, constraint));
+
+    if (auto lt = getMutable<LocalType>(resultTy); resultIsLValue && lt)
+    {
+        lt->domain = simplifyUnion(builtinTypes, arena, lt->domain, srcTy).result;
+        LUAU_ASSERT(lt->blockCount > 0);
+        --lt->blockCount;
+
+        LUAU_ASSERT(0 <= lt->blockCount);
+
+        if (0 == lt->blockCount)
+            asMutable(resultTy)->ty.emplace<BoundType>(lt->domain);
+    }
+    else if (get<BlockedType>(resultTy))
+    {
+        if (follow(srcTy) == resultTy)
+        {
+            // It is sometimes the case that we find that a blocked type
+            // is only blocked on itself. This doesn't actually
+            // constitute any meaningful constraint, so we replace it
+            // with a free type.
+            TypeId f = freshType(arena, builtinTypes, constraint->scope);
+            asMutable(resultTy)->ty.emplace<BoundType>(f);
+        }
+        else
+            bindBlockedType(resultTy, srcTy, srcTy, constraint);
+    }
+    else
+    {
+        LUAU_ASSERT(resultIsLValue);
+        unify(constraint, resultTy, srcTy);
+    }
+
+    unblock(resultTy, constraint->location);
+    return true;
+}
+
 bool ConstraintSolver::tryDispatch(const UnpackConstraint& c, NotNull<const Constraint> constraint)
 {
     TypePackId sourcePack = follow(c.sourcePack);
@@ -1741,44 +1790,6 @@ bool ConstraintSolver::tryDispatch(const UnpackConstraint& c, NotNull<const Cons
     auto resultIter = begin(resultPack);
     auto resultEnd = end(resultPack);

-    auto apply = [&](TypeId resultTy, TypeId srcTy) {
-        resultTy = follow(resultTy);
-        LUAU_ASSERT(canMutate(resultTy, constraint));
-
-        if (auto lt = getMutable<LocalType>(resultTy); c.resultIsLValue && lt)
-        {
-            lt->domain = simplifyUnion(builtinTypes, arena, lt->domain, srcTy).result;
-            LUAU_ASSERT(lt->blockCount > 0);
-            --lt->blockCount;
-
-            LUAU_ASSERT(0 <= lt->blockCount);
-
-            if (0 == lt->blockCount)
-                asMutable(resultTy)->ty.emplace<BoundType>(lt->domain);
-        }
-        else if (get<BlockedType>(resultTy))
-        {
-            if (follow(srcTy) == resultTy)
-            {
-                // It is sometimes the case that we find that a blocked type
-                // is only blocked on itself. This doesn't actually
-                // constitute any meaningful constraint, so we replace it
-                // with a free type.
-                TypeId f = freshType(arena, builtinTypes, constraint->scope);
-                asMutable(resultTy)->ty.emplace<BoundType>(f);
-            }
-            else
-                bindBlockedType(resultTy, srcTy, srcTy, constraint);
-        }
-        else
-        {
-            LUAU_ASSERT(c.resultIsLValue);
-            unify(constraint, resultTy, srcTy);
-        }
-
-        unblock(resultTy, constraint->location);
-    };
-
     size_t i = 0;
     while (resultIter != resultEnd)
     {
@@ -1795,10 +1806,10 @@ bool ConstraintSolver::tryDispatch(const UnpackConstraint& c, NotNull<const Cons
             if (auto ut = getMutable<UnionType>(resultTy))
             {
                 for (auto opt : ut->options)
-                    apply(opt, srcTy);
+                    tryDispatchUnpack1(constraint, opt, srcTy, c.resultIsLValue);
             }
             else
-                apply(resultTy, srcTy);
+                tryDispatchUnpack1(constraint, resultTy, srcTy, c.resultIsLValue);
         }
         else
             unify(constraint, resultTy, srcTy);
@@ -1836,6 +1847,11 @@ bool ConstraintSolver::tryDispatch(const UnpackConstraint& c, NotNull<const Cons
     return true;
 }

+bool ConstraintSolver::tryDispatch(const Unpack1Constraint& c, NotNull<const Constraint> constraint)
+{
+    return tryDispatchUnpack1(constraint, c.resultType, c.sourceType, c.resultIsLValue);
+}
+
 bool ConstraintSolver::tryDispatch(const SetOpConstraint& c, NotNull<const Constraint> constraint, bool force)
 {
     bool blocked = false;
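For orientation, a sketch of how the new single-type unpack can be queued; `resultTy` and `sourceTy` stand in for whatever a caller is assigning, and this mirrors how `UnpackConstraint` is already pushed for packs elsewhere in this diff.

```cpp
// Sketch: queue a pair-of-types unpack instead of wrapping both sides in
// one-element type packs. resultIsLValue=true lets a LocalType result grow
// its domain, exactly as in tryDispatchUnpack1 above.
pushConstraint(constraint->scope, constraint->location,
    Unpack1Constraint{resultTy, sourceTy, /*resultIsLValue=*/ true});
```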
@@ -1914,8 +1930,8 @@ bool ConstraintSolver::tryDispatch(const ReducePackConstraint& c, NotNull<const

 bool ConstraintSolver::tryDispatch(const EqualityConstraint& c, NotNull<const Constraint> constraint, bool force)
 {
-    unify(constraint->scope, constraint->location, c.resultType, c.assignmentType);
-    unify(constraint->scope, constraint->location, c.assignmentType, c.resultType);
+    unify(constraint, c.resultType, c.assignmentType);
+    unify(constraint, c.assignmentType, c.resultType);
     return true;
 }

@@ -2034,7 +2050,7 @@ bool ConstraintSolver::tryDispatchIterableTable(TypeId iteratorTy, const Iterabl
         LUAU_ASSERT(nextFn);
         const TypePackId nextRetPack = nextFn->retTypes;

-        pushConstraint(constraint->scope, constraint->location, UnpackConstraint{c.variables, nextRetPack});
+        pushConstraint(constraint->scope, constraint->location, UnpackConstraint{c.variables, nextRetPack, /* resultIsLValue=*/ true});
         return true;
     }
     else
@@ -2115,15 +2131,15 @@ bool ConstraintSolver::tryDispatchIterableFunction(
     return true;
 }

-std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTableProp(
-    TypeId subjectType, const std::string& propName, ValueContext context, bool inConditional, bool suppressSimplification)
+std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTableProp(NotNull<const Constraint> constraint, TypeId subjectType,
+    const std::string& propName, ValueContext context, bool inConditional, bool suppressSimplification)
 {
     DenseHashSet<TypeId> seen{nullptr};
-    return lookupTableProp(subjectType, propName, context, inConditional, suppressSimplification, seen);
+    return lookupTableProp(constraint, subjectType, propName, context, inConditional, suppressSimplification, seen);
 }

-std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTableProp(
-    TypeId subjectType, const std::string& propName, ValueContext context, bool inConditional, bool suppressSimplification, DenseHashSet<TypeId>& seen)
+std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTableProp(NotNull<const Constraint> constraint, TypeId subjectType,
+    const std::string& propName, ValueContext context, bool inConditional, bool suppressSimplification, DenseHashSet<TypeId>& seen)
 {
     if (seen.contains(subjectType))
         return {};
@@ -2197,7 +2213,7 @@ std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTa
     }
     else if (auto mt = get<MetatableType>(subjectType); mt && context == ValueContext::RValue)
     {
-        auto [blocked, result] = lookupTableProp(mt->table, propName, context, inConditional, suppressSimplification, seen);
+        auto [blocked, result] = lookupTableProp(constraint, mt->table, propName, context, inConditional, suppressSimplification, seen);
         if (!blocked.empty() || result)
             return {blocked, result};

@@ -2228,10 +2244,10 @@ std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTa
             }
         }
         else
-            return lookupTableProp(indexType, propName, context, inConditional, suppressSimplification, seen);
+            return lookupTableProp(constraint, indexType, propName, context, inConditional, suppressSimplification, seen);
     }
     else if (get<MetatableType>(mtt))
-        return lookupTableProp(mtt, propName, context, inConditional, suppressSimplification, seen);
+        return lookupTableProp(constraint, mtt, propName, context, inConditional, suppressSimplification, seen);
     }
     else if (auto ct = get<ClassType>(subjectType))
     {
@@ -2251,14 +2267,14 @@ std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTa
         if (indexProp == metatable->props.end())
             return {{}, std::nullopt};

-        return lookupTableProp(indexProp->second.type(), propName, context, inConditional, suppressSimplification, seen);
+        return lookupTableProp(constraint, indexProp->second.type(), propName, context, inConditional, suppressSimplification, seen);
     }
     else if (auto ft = get<FreeType>(subjectType))
     {
         const TypeId upperBound = follow(ft->upperBound);

         if (get<TableType>(upperBound) || get<PrimitiveType>(upperBound))
-            return lookupTableProp(upperBound, propName, context, inConditional, suppressSimplification, seen);
+            return lookupTableProp(constraint, upperBound, propName, context, inConditional, suppressSimplification, seen);

         // TODO: The upper bound could be an intersection that contains suitable tables or classes.

@@ -2279,7 +2295,7 @@ std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTa
             break;
         }

-        unify(scope, Location{}, subjectType, newUpperBound);
+        unify(constraint, subjectType, newUpperBound);

         return {{}, propType};
     }
@@ -2290,7 +2306,7 @@ std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTa

         for (TypeId ty : utv)
         {
-            auto [innerBlocked, innerResult] = lookupTableProp(ty, propName, context, inConditional, suppressSimplification, seen);
+            auto [innerBlocked, innerResult] = lookupTableProp(constraint, ty, propName, context, inConditional, suppressSimplification, seen);
             blocked.insert(blocked.end(), innerBlocked.begin(), innerBlocked.end());
             if (innerResult)
                 options.insert(*innerResult);
@@ -2319,7 +2335,7 @@ std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTa

         for (TypeId ty : itv)
         {
-            auto [innerBlocked, innerResult] = lookupTableProp(ty, propName, context, inConditional, suppressSimplification, seen);
+            auto [innerBlocked, innerResult] = lookupTableProp(constraint, ty, propName, context, inConditional, suppressSimplification, seen);
             blocked.insert(blocked.end(), innerBlocked.begin(), innerBlocked.end());
             if (innerResult)
                 options.insert(*innerResult);
@@ -2353,39 +2369,39 @@ std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTa
     return {{}, std::nullopt};
 }

-template <typename TID>
-bool ConstraintSolver::unify(NotNull<Scope> scope, Location location, TID subType, TID superType)
+template<typename TID>
+bool ConstraintSolver::unify(NotNull<const Constraint> constraint, TID subTy, TID superTy)
 {
-    Unifier2 u2{NotNull{arena}, builtinTypes, scope, NotNull{&iceReporter}};
+    Unifier2 u2{NotNull{arena}, builtinTypes, constraint->scope, NotNull{&iceReporter}};

-    const bool ok = u2.unify(subType, superType);
+    const bool ok = u2.unify(subTy, superTy);

+    for (ConstraintV& c : u2.incompleteSubtypes)
+    {
+        NotNull<Constraint> addition = pushConstraint(constraint->scope, constraint->location, std::move(c));
+        inheritBlocks(constraint, addition);
+    }
+
     if (ok)
     {
         for (const auto& [expanded, additions] : u2.expandedFreeTypes)
         {
             for (TypeId addition : additions)
-                upperBoundContributors[expanded].push_back(std::make_pair(location, addition));
+                upperBoundContributors[expanded].push_back(std::make_pair(constraint->location, addition));
         }
     }
     else
     {
-        reportError(OccursCheckFailed{}, location);
+        reportError(OccursCheckFailed{}, constraint->location);
         return false;
     }

-    unblock(subType, location);
-    unblock(superType, location);
+    unblock(subTy, constraint->location);
+    unblock(superTy, constraint->location);

     return true;
 }

-template<typename TID>
-bool ConstraintSolver::unify(NotNull<const Constraint> constraint, TID subTy, TID superTy)
-{
-    return unify(constraint->scope, constraint->location, subTy, superTy);
-}
-
 void ConstraintSolver::bindBlockedType(TypeId blockedTy, TypeId resultTy, TypeId rootTy, NotNull<const Constraint> constraint)
 {
     resultTy = follow(resultTy);
@@ -2761,9 +2777,6 @@ LUAU_NOINLINE void ConstraintSolver::throwUserCancelError()
 }

 // Instantiate private template implementations for external callers
-template bool ConstraintSolver::unify(NotNull<Scope> scope, Location location, TypeId subType, TypeId superType);
-template bool ConstraintSolver::unify(NotNull<Scope> scope, Location location, TypePackId subType, TypePackId superType);
-
 template bool ConstraintSolver::unify(NotNull<const Constraint> constraint, TypeId subTy, TypeId superTy);
 template bool ConstraintSolver::unify(NotNull<const Constraint> constraint, TypePackId subTy, TypePackId superTy);

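A short sketch of what callers see after the `unify` consolidation: only the constraint-taking overload remains, so every call site supplies a constraint whose scope and location are reused for blocking, error reporting, and re-queued subtype work.

```cpp
// Sketch: the only remaining entry point. On failure the occurs-check error has
// already been reported at constraint->location; on success any subtyping the
// unifier could not finish has been pushed as new constraints that inherit this
// constraint's blocks.
if (!unify(constraint, subTy, superTy))
    return false;
```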
@@ -27,6 +27,10 @@ void collectOperands(DefId def, std::vector<DefId>* operands)
         operands->push_back(def);
     else if (auto phi = get<Phi>(def))
     {
+        // A trivial phi node has no operands to populate, so we push this definition in directly.
+        if (phi->operands.empty())
+            return operands->push_back(def);
+
         for (const Def* operand : phi->operands)
             collectOperands(NotNull{operand}, operands);
     }
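A small illustration of the behavioural change above, assuming `trivialPhi` names a definition whose `Phi` has no operands; with the early return, such a definition now contributes itself instead of vanishing from the operand list.

```cpp
// Sketch: a phi with no operands stands for itself after collection.
std::vector<DefId> operands;
collectOperands(trivialPhi, &operands); // trivialPhi is a placeholder DefId for an operand-less Phi
LUAU_ASSERT(operands.size() == 1);
LUAU_ASSERT(operands[0].get() == trivialPhi.get());
```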
@@ -14,10 +14,12 @@
 #include "Luau/Scope.h"
 #include "Luau/StringUtils.h"
 #include "Luau/TimeTrace.h"
+#include "Luau/TypeArena.h"
 #include "Luau/TypeChecker2.h"
 #include "Luau/NonStrictTypeChecker.h"
 #include "Luau/TypeInfer.h"
 #include "Luau/Variant.h"
+#include "Luau/VisitType.h"

 #include <algorithm>
 #include <chrono>
@@ -35,6 +37,7 @@ LUAU_FASTFLAGVARIABLE(LuauKnowsTheDataModel3, false)
 LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
 LUAU_FASTFLAGVARIABLE(DebugLuauLogSolverToJson, false)
 LUAU_FASTFLAGVARIABLE(DebugLuauLogSolverToJsonFile, false)
+LUAU_FASTFLAGVARIABLE(DebugLuauForbidInternalTypes, false)

 namespace Luau
 {
@@ -1160,6 +1163,56 @@ ModulePtr check(const SourceModule& sourceModule, Mode mode, const std::vector<R
         std::move(prepareModuleScope), options, limits, recordJsonLog, writeJsonLog);
 }

+struct InternalTypeFinder : TypeOnceVisitor
+{
+    bool visit(TypeId, const ClassType&) override
+    {
+        return false;
+    }
+
+    bool visit(TypeId, const BlockedType&) override
+    {
+        LUAU_ASSERT(false);
+        return false;
+    }
+
+    bool visit(TypeId, const FreeType&) override
+    {
+        LUAU_ASSERT(false);
+        return false;
+    }
+
+    bool visit(TypeId, const PendingExpansionType&) override
+    {
+        LUAU_ASSERT(false);
+        return false;
+    }
+
+    bool visit(TypeId, const LocalType&) override
+    {
+        LUAU_ASSERT(false);
+        return false;
+    }
+
+    bool visit(TypePackId, const BlockedTypePack&) override
+    {
+        LUAU_ASSERT(false);
+        return false;
+    }
+
+    bool visit(TypePackId, const FreeTypePack&) override
+    {
+        LUAU_ASSERT(false);
+        return false;
+    }
+
+    bool visit(TypePackId, const TypeFamilyInstanceTypePack&) override
+    {
+        LUAU_ASSERT(false);
+        return false;
+    }
+};
+
 ModulePtr check(const SourceModule& sourceModule, Mode mode, const std::vector<RequireCycle>& requireCycles, NotNull<BuiltinTypes> builtinTypes,
     NotNull<InternalErrorReporter> iceHandler, NotNull<ModuleResolver> moduleResolver, NotNull<FileResolver> fileResolver,
     const ScopePtr& parentScope, std::function<void(const ModuleName&, const ScopePtr&)> prepareModuleScope, FrontendOptions options,
@@ -1201,12 +1254,15 @@ ModulePtr check(const SourceModule& sourceModule, Mode mode, const std::vector<R
     cg.visitModuleRoot(sourceModule.root);
     result->errors = std::move(cg.errors);

-    ConstraintSolver cs{NotNull{&normalizer}, NotNull(cg.rootScope), borrowConstraints(cg.constraints), result->name, moduleResolver,
-        requireCycles, logger.get(), limits};
+    ConstraintSolver cs{NotNull{&normalizer}, NotNull(cg.rootScope), borrowConstraints(cg.constraints), result->name, moduleResolver, requireCycles,
+        logger.get(), limits};

     if (options.randomizeConstraintResolutionSeed)
         cs.randomize(*options.randomizeConstraintResolutionSeed);

+    for (TypeId ty : cg.familyInstances)
+        cs.familyInstances.insert(ty);
+
     try
     {
         cs.run();
@@ -1236,7 +1292,33 @@ ModulePtr check(const SourceModule& sourceModule, Mode mode, const std::vector<R
     result->type = sourceModule.type;
     result->upperBoundContributors = std::move(cs.upperBoundContributors);

-    result->clonePublicInterface(builtinTypes, *iceHandler);
+    if (FFlag::DebugLuauForbidInternalTypes)
+    {
+        InternalTypeFinder finder;
+
+        finder.traverse(result->returnType);
+
+        for (const auto& [_, binding] : result->exportedTypeBindings)
+            finder.traverse(binding.type);
+
+        for (const auto& [_, ty] : result->astTypes)
+            finder.traverse(ty);
+
+        for (const auto& [_, ty] : result->astExpectedTypes)
+            finder.traverse(ty);
+
+        for (const auto& [_, tp] : result->astTypePacks)
+            finder.traverse(tp);
+
+        for (const auto& [_, ty] : result->astResolvedTypes)
+            finder.traverse(ty);
+
+        for (const auto& [_, ty] : result->astOverloadResolvedTypes)
+            finder.traverse(ty);
+
+        for (const auto& [_, tp] : result->astResolvedTypePacks)
+            finder.traverse(tp);
+    }

     if (result->timeout || result->cancelled)
     {
@@ -1259,6 +1341,9 @@ ModulePtr check(const SourceModule& sourceModule, Mode mode, const std::vector<R
         Luau::check(builtinTypes, NotNull{&unifierState}, NotNull{&limits}, logger.get(), sourceModule, result.get());
     }

+    unfreeze(result->interfaceTypes);
+    result->clonePublicInterface(builtinTypes, *iceHandler);
+
     // It would be nice if we could freeze the arenas before doing type
     // checking, but we'll have to do some work to get there.
     //
@@ -1295,9 +1380,8 @@ ModulePtr Frontend::check(const SourceModule& sourceModule, Mode mode, std::vect
     }
     catch (const InternalCompilerError& err)
     {
-        InternalCompilerError augmented = err.location.has_value()
-            ? InternalCompilerError{err.message, sourceModule.name, *err.location}
-            : InternalCompilerError{err.message, sourceModule.name};
+        InternalCompilerError augmented = err.location.has_value() ? InternalCompilerError{err.message, sourceModule.name, *err.location}
+                                                                   : InternalCompilerError{err.message, sourceModule.name};
         throw augmented;
     }
 }
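The new check only runs when the debug flag is set, for example from a test or while reproducing a bug; a hedged sketch of turning it on follows. The traversals themselves are the ones in the diff, and the only assumption is a build where `LUAU_ASSERT` is active.

```cpp
// Sketch: opt into the leak check before running the frontend. With the flag on,
// any solver-internal type (blocked, free, pending expansion, ...) reachable from
// the module's public tables trips an assertion instead of surfacing later as a
// confusing type error.
FFlag::DebugLuauForbidInternalTypes.value = true;

InternalTypeFinder finder;
finder.traverse(result->returnType); // asserts if an internal type escaped the solver
```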
@@ -196,9 +196,6 @@ Module::~Module()

 void Module::clonePublicInterface(NotNull<BuiltinTypes> builtinTypes, InternalErrorReporter& ice)
 {
-    LUAU_ASSERT(interfaceTypes.types.empty());
-    LUAU_ASSERT(interfaceTypes.typePacks.empty());
-
     CloneState cloneState{builtinTypes};

     ScopePtr moduleScope = getModuleScope();
@@ -430,6 +430,10 @@ bool Normalizer::isInhabited(const NormalizedType* norm)

 bool Normalizer::isInhabited(const NormalizedType* norm, Set<TypeId>& seen)
 {
+    RecursionCounter _rc(&sharedState->counters.recursionCount);
+    if (!withinResourceLimits())
+        return false;
+
     // If normalization failed, the type is complex, and so is more likely than not to be inhabited.
     if (!norm)
         return true;
@@ -473,6 +477,10 @@ bool Normalizer::isInhabited(TypeId ty)

 bool Normalizer::isInhabited(TypeId ty, Set<TypeId>& seen)
 {
+    RecursionCounter _rc(&sharedState->counters.recursionCount);
+    if (!withinResourceLimits())
+        return false;
+
     // TODO: use log.follow(ty), CLI-64291
     ty = follow(ty);

@@ -2999,6 +3007,7 @@ bool Normalizer::intersectNormalWithTy(NormalizedType& here, TypeId there, Set<T

 void makeTableShared(TypeId ty)
 {
+    ty = follow(ty);
     if (auto tableTy = getMutable<TableType>(ty))
     {
         for (auto& [_, prop] : tableTy->props)
@@ -363,6 +363,31 @@ void OverloadResolver::add(Analysis analysis, TypeId ty, ErrorVec&& errors)
     }
 }

+// we wrap calling the overload resolver in a separate function to reduce overall stack pressure in `solveFunctionCall`.
+// this limits the lifetime of `OverloadResolver`, a large type, to only as long as it is actually needed.
+std::optional<TypeId> selectOverload(
+    NotNull<BuiltinTypes> builtinTypes,
+    NotNull<TypeArena> arena,
+    NotNull<Normalizer> normalizer,
+    NotNull<Scope> scope,
+    NotNull<InternalErrorReporter> iceReporter,
+    NotNull<TypeCheckLimits> limits,
+    const Location& location,
+    TypeId fn,
+    TypePackId argsPack
+)
+{
+    OverloadResolver resolver{builtinTypes, arena, normalizer, scope, iceReporter, limits, location};
+    auto [status, overload] = resolver.selectOverload(fn, argsPack);
+
+    if (status == OverloadResolver::Analysis::Ok)
+        return overload;
+
+    if (get<AnyType>(fn) || get<FreeType>(fn))
+        return fn;
+
+    return {};
+}
+
 SolveResult solveFunctionCall(
     NotNull<TypeArena> arena,
@@ -376,17 +401,8 @@ SolveResult solveFunctionCall(
     TypePackId argsPack
 )
 {
-    OverloadResolver resolver{
-        builtinTypes, NotNull{arena}, normalizer, scope, iceReporter, limits, location};
-    auto [status, overload] = resolver.selectOverload(fn, argsPack);
-    TypeId overloadToUse = fn;
-    if (status == OverloadResolver::Analysis::Ok)
-        overloadToUse = overload;
-    else if (get<AnyType>(fn) || get<FreeType>(fn))
-    {
-        // Nothing. Let's keep going
-    }
-    else
+    std::optional<TypeId> overloadToUse = selectOverload(builtinTypes, arena, normalizer, scope, iceReporter, limits, location, fn, argsPack);
+    if (!overloadToUse)
         return {SolveResult::NoMatchingOverload};

     TypePackId resultPack = arena->freshTypePack(scope);
@@ -394,7 +410,7 @@ SolveResult solveFunctionCall(
     TypeId inferredTy = arena->addType(FunctionType{TypeLevel{}, scope.get(), argsPack, resultPack});
     Unifier2 u2{NotNull{arena}, builtinTypes, scope, iceReporter};

-    const bool occursCheckPassed = u2.unify(overloadToUse, inferredTy);
+    const bool occursCheckPassed = u2.unify(*overloadToUse, inferredTy);

     if (!u2.genericSubstitutions.empty() || !u2.genericPackSubstitutions.empty())
     {
@@ -416,7 +432,7 @@ SolveResult solveFunctionCall(
     result.typePackId = resultPack;

     LUAU_ASSERT(overloadToUse);
-    result.overloadToUse = overloadToUse;
+    result.overloadToUse = *overloadToUse;
     result.inferredTy = inferredTy;
     result.expandedFreeTypes = std::move(u2.expandedFreeTypes);

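A brief sketch of the helper's contract as `solveFunctionCall` now relies on it: `std::nullopt` means no overload matched and the callee is neither `any` nor a free type, which the caller maps to `SolveResult::NoMatchingOverload`.

```cpp
// Sketch: using the new helper directly.
std::optional<TypeId> overload =
    selectOverload(builtinTypes, arena, normalizer, scope, iceReporter, limits, location, fn, argsPack);
if (!overload)
    return {SolveResult::NoMatchingOverload}; // nothing matched and fn is not any/free
// *overload is either the matching overload, or fn itself when fn is any/free.
```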
@@ -654,7 +654,7 @@ SubtypingResult Subtyping::isCovariantWith(SubtypingEnvironment& env, TypePackId
             {
                 for (size_t i = headSize; i < superHead.size(); ++i)
                     results.push_back(isCovariantWith(env, vt->ty, superHead[i])
-                                          .withSubComponent(TypePath::TypeField::Variadic)
+                                          .withSubPath(TypePath::PathBuilder().tail().variadic().build())
                                           .withSuperComponent(TypePath::Index{i}));
             }
             else if (auto gt = get<GenericTypePack>(*subTail))
@@ -21,6 +21,7 @@

 LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
 LUAU_FASTFLAGVARIABLE(LuauToStringSimpleCompositeTypesSingleLine, false)
+LUAU_FASTFLAGVARIABLE(LuauStringifyCyclesRootedAtPacks, false)

 /*
  * Enables increasing levels of verbosity for Luau type names when stringifying.
@@ -1491,10 +1492,21 @@ ToStringResult toStringDetailed(TypePackId tp, ToStringOptions& opts)
     else
         tvs.stringify(tp);

-    if (!cycles.empty())
+    if (FFlag::LuauStringifyCyclesRootedAtPacks)
     {
-        result.cycle = true;
-        state.emit(" where ");
+        if (!cycles.empty() || !cycleTPs.empty())
+        {
+            result.cycle = true;
+            state.emit(" where ");
+        }
+    }
+    else
+    {
+        if (!cycles.empty())
+        {
+            result.cycle = true;
+            state.emit(" where ");
+        }
     }

     state.exhaustive = true;
@@ -1521,6 +1533,32 @@ ToStringResult toStringDetailed(TypePackId tp, ToStringOptions& opts)
         semi = true;
     }

+    if (FFlag::LuauStringifyCyclesRootedAtPacks)
+    {
+        std::vector<std::pair<TypePackId, std::string>> sortedCycleTpNames{state.cycleTpNames.begin(), state.cycleTpNames.end()};
+        std::sort(sortedCycleTpNames.begin(), sortedCycleTpNames.end(), [](const auto& a, const auto& b) {
+            return a.second < b.second;
+        });
+
+        TypePackStringifier tps{tvs.state};
+
+        for (const auto& [cycleTp, name] : sortedCycleTpNames)
+        {
+            if (semi)
+                state.emit(" ; ");
+
+            state.emit(name);
+            state.emit(" = ");
+            Luau::visit(
+                [&tps, cycleTp = cycleTp](auto t) {
+                    return tps(cycleTp, t);
+                },
+                cycleTp->ty);
+
+            semi = true;
+        }
+    }
+
     if (opts.maxTypeLength > 0 && result.name.length() > opts.maxTypeLength)
     {
         result.name += "... *TRUNCATED*";
@@ -1721,7 +1759,7 @@ std::string toString(const Constraint& constraint, ToStringOptions& opts)
     {
         std::string subStr = tos(c.subPack);
         std::string superStr = tos(c.superPack);
-        return subStr + " <: " + superStr;
+        return subStr + " <...: " + superStr;
     }
     else if constexpr (std::is_same_v<T, GeneralizationConstraint>)
     {
@@ -1782,7 +1820,7 @@ std::string toString(const Constraint& constraint, ToStringOptions& opts)
     }
     else if constexpr (std::is_same_v<T, SetIndexerConstraint>)
     {
-        return tos(c.resultType) + " ~ setIndexer " + tos(c.subjectType) + " [ " + tos(c.indexType) + " ] " + tos(c.propType);
+        return "setIndexer " + tos(c.subjectType) + " [ " + tos(c.indexType) + " ] " + tos(c.propType);
     }
     else if constexpr (std::is_same_v<T, SingletonOrTopTypeConstraint>)
     {
@@ -1795,7 +1833,9 @@ std::string toString(const Constraint& constraint, ToStringOptions& opts)
         return result + " ~ if isSingleton D then D else unknown where D = " + discriminant;
     }
     else if constexpr (std::is_same_v<T, UnpackConstraint>)
-        return tos(c.resultPack) + " ~ unpack " + tos(c.sourcePack);
+        return tos(c.resultPack) + " ~ ...unpack " + tos(c.sourcePack);
+    else if constexpr (std::is_same_v<T, Unpack1Constraint>)
+        return tos(c.resultType) + " ~ unpack " + tos(c.sourceType);
     else if constexpr (std::is_same_v<T, SetOpConstraint>)
     {
         const char* op = c.mode == SetOpConstraint::Union ? " | " : " & ";
@@ -1648,7 +1648,7 @@ struct TypeChecker2
         visit(*fn->returnAnnotation);

         // If the function type has a family annotation, we need to see if we can suggest an annotation
-        TypeFamilyReductionGuesser guesser{builtinTypes, NotNull{&normalizer}};
+        TypeFamilyReductionGuesser guesser{NotNull{&module->internalTypes}, builtinTypes, NotNull{&normalizer}};
         for (TypeId retTy : inferredFtv->retTypes)
         {
             if (get<TypeFamilyInstanceType>(follow(retTy)))
@@ -17,23 +17,32 @@
 #include "Luau/TxnLog.h"
 #include "Luau/Type.h"
 #include "Luau/TypeCheckLimits.h"
+#include "Luau/TypeFamilyReductionGuesser.h"
 #include "Luau/TypeFwd.h"
 #include "Luau/TypeUtils.h"
 #include "Luau/Unifier2.h"
 #include "Luau/VecDeque.h"
 #include "Luau/VisitType.h"

+// used to control emitting CodeTooComplex warnings on type family reduction
 LUAU_DYNAMIC_FASTINTVARIABLE(LuauTypeFamilyGraphReductionMaximumSteps, 1'000'000);

+// used to control falling back to a more conservative reduction based on guessing
+// when this value is set to a negative value, guessing will be totally disabled.
+LUAU_DYNAMIC_FASTINTVARIABLE(LuauTypeFamilyUseGuesserDepth, -1);
+
 LUAU_FASTFLAG(DebugLuauLogSolver);

 namespace Luau
 {

+using TypeOrTypePackIdSet = DenseHashSet<const void*>;
+
 struct InstanceCollector : TypeOnceVisitor
 {
     VecDeque<TypeId> tys;
     VecDeque<TypePackId> tps;
+    TypeOrTypePackIdSet shouldGuess{nullptr};
     std::vector<TypeId> cyclicInstance;

     bool visit(TypeId ty, const TypeFamilyInstanceType&) override
@@ -44,7 +53,12 @@ struct InstanceCollector : TypeOnceVisitor
         // in the queue. Consider Add<Add<Add<number, number>, number>, number>:
         // we want to reduce the innermost Add<number, number> instantiation
         // first.
+
+        if (DFInt::LuauTypeFamilyUseGuesserDepth >= 0 && typeFamilyDepth > DFInt::LuauTypeFamilyUseGuesserDepth)
+            shouldGuess.insert(ty);
+
         tys.push_front(ty);

         return true;
     }

@@ -69,7 +83,12 @@ struct InstanceCollector : TypeOnceVisitor
         // in the queue. Consider Add<Add<Add<number, number>, number>, number>:
         // we want to reduce the innermost Add<number, number> instantiation
         // first.
+
+        if (DFInt::LuauTypeFamilyUseGuesserDepth >= 0 && typeFamilyDepth > DFInt::LuauTypeFamilyUseGuesserDepth)
+            shouldGuess.insert(tp);
+
         tps.push_front(tp);

         return true;
     }
 };
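The two dynamic ints above are the knobs for this behaviour; a hedged sketch of enabling the guesser fallback follows. The threshold value is illustrative, and the default of -1 keeps guessing disabled.

```cpp
// Sketch: flag deeply nested family instantiations for guessing instead of
// full reduction. Any instance nested deeper than the threshold is routed
// through TypeFamilyReductionGuesser by the collector and reducer shown here.
DFInt::LuauTypeFamilyUseGuesserDepth.value = 8; // illustrative threshold; -1 (default) disables guessing
```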
@@ -80,19 +99,21 @@ struct FamilyReducer

     VecDeque<TypeId> queuedTys;
     VecDeque<TypePackId> queuedTps;
+    TypeOrTypePackIdSet shouldGuess;
     std::vector<TypeId> cyclicTypeFamilies;
-    DenseHashSet<const void*> irreducible{nullptr};
+    TypeOrTypePackIdSet irreducible{nullptr};
     FamilyGraphReductionResult result;
     bool force = false;

     // Local to the constraint being reduced.
     Location location;

-    FamilyReducer(VecDeque<TypeId> queuedTys, VecDeque<TypePackId> queuedTps, std::vector<TypeId> cyclicTypes, Location location,
-        TypeFamilyContext ctx, bool force = false)
+    FamilyReducer(VecDeque<TypeId> queuedTys, VecDeque<TypePackId> queuedTps, TypeOrTypePackIdSet shouldGuess, std::vector<TypeId> cyclicTypes,
+        Location location, TypeFamilyContext ctx, bool force = false)
         : ctx(ctx)
         , queuedTys(std::move(queuedTys))
         , queuedTps(std::move(queuedTps))
+        , shouldGuess(std::move(shouldGuess))
         , cyclicTypeFamilies(std::move(cyclicTypes))
         , force(force)
         , location(location)
@@ -154,13 +175,12 @@ struct FamilyReducer
     template<typename T>
     void replace(T subject, T replacement)
     {
-        if (FFlag::DebugLuauLogSolver)
-            printf("%s -> %s\n", toString(subject, {true}).c_str(), toString(replacement, {true}).c_str());
-
-        // TODO: This should be an ICE (CLI-100942)
         if (subject->owningArena != ctx.arena.get())
             ctx.ice->ice("Attempting to modify a type family instance from another arena", location);

+        if (FFlag::DebugLuauLogSolver)
+            printf("%s -> %s\n", toString(subject, {true}).c_str(), toString(replacement, {true}).c_str());
+
         asMutable(subject)->ty.template emplace<Unifiable::Bound<T>>(replacement);

         if constexpr (std::is_same_v<T, TypeId>)
@@ -265,6 +285,34 @@ struct FamilyReducer
         return true;
     }

+    template<typename TID>
+    inline bool tryGuessing(TID subject)
+    {
+        if (shouldGuess.contains(subject))
+        {
+            if (FFlag::DebugLuauLogSolver)
+                printf("Flagged %s for reduction with guesser.\n", toString(subject, {true}).c_str());
+
+            TypeFamilyReductionGuesser guesser{ctx.arena, ctx.builtins, ctx.normalizer};
+            auto guessed = guesser.guess(subject);
+
+            if (guessed)
+            {
+                if (FFlag::DebugLuauLogSolver)
+                    printf("Selected %s as the guessed result type.\n", toString(*guessed, {true}).c_str());
+
+                replace(subject, *guessed);
+                return true;
+            }
+
+            if (FFlag::DebugLuauLogSolver)
+                printf("Failed to produce a guess for the result of %s.\n", toString(subject, {true}).c_str());
+        }
+
+        return false;
+    }
+

     void stepType()
     {
         TypeId subject = follow(queuedTys.front());
@@ -288,6 +336,9 @@ struct FamilyReducer
                 return;
             }

+            if (tryGuessing(subject))
+                return;
+
             TypeFamilyReductionResult<TypeId> result = tfit->family->reducer(subject, tfit->typeArguments, tfit->packArguments, NotNull{&ctx});
             handleFamilyReduction(subject, result);
         }
@@ -309,6 +360,9 @@ struct FamilyReducer
             if (!testParameters(subject, tfit))
                 return;

+            if (tryGuessing(subject))
+                return;
+
             TypeFamilyReductionResult<TypePackId> result = tfit->family->reducer(subject, tfit->typeArguments, tfit->packArguments, NotNull{&ctx});
             handleFamilyReduction(subject, result);
         }
@@ -324,9 +378,9 @@ struct FamilyReducer
 };

 static FamilyGraphReductionResult reduceFamiliesInternal(
|
||||||
VecDeque<TypeId> queuedTys, VecDeque<TypePackId> queuedTps, std::vector<TypeId> cyclics, Location location, TypeFamilyContext ctx, bool force)
|
VecDeque<TypeId> queuedTys, VecDeque<TypePackId> queuedTps, TypeOrTypePackIdSet shouldGuess, std::vector<TypeId> cyclics, Location location, TypeFamilyContext ctx, bool force)
|
||||||
{
|
{
|
||||||
FamilyReducer reducer{std::move(queuedTys), std::move(queuedTps), std::move(cyclics), location, ctx, force};
|
FamilyReducer reducer{std::move(queuedTys), std::move(queuedTps), std::move(shouldGuess), std::move(cyclics), location, ctx, force};
|
||||||
int iterationCount = 0;
|
int iterationCount = 0;
|
||||||
|
|
||||||
while (!reducer.done())
|
while (!reducer.done())
|
||||||
|
@ -360,7 +414,7 @@ FamilyGraphReductionResult reduceFamilies(TypeId entrypoint, Location location,
|
||||||
if (collector.tys.empty() && collector.tps.empty())
|
if (collector.tys.empty() && collector.tps.empty())
|
||||||
return {};
|
return {};
|
||||||
|
|
||||||
return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), std::move(collector.cyclicInstance), location, ctx, force);
|
return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), std::move(collector.shouldGuess), std::move(collector.cyclicInstance), location, ctx, force);
|
||||||
}
|
}
|
||||||
|
|
||||||
FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location location, TypeFamilyContext ctx, bool force)
|
FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location location, TypeFamilyContext ctx, bool force)
|
||||||
|
@ -379,7 +433,7 @@ FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location locati
|
||||||
if (collector.tys.empty() && collector.tps.empty())
|
if (collector.tys.empty() && collector.tps.empty())
|
||||||
return {};
|
return {};
|
||||||
|
|
||||||
return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), {}, location, ctx, force);
|
return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), std::move(collector.shouldGuess), std::move(collector.cyclicInstance), location, ctx, force);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool isPending(TypeId ty, ConstraintSolver* solver)
|
bool isPending(TypeId ty, ConstraintSolver* solver)
|
||||||
|
@ -1495,7 +1549,7 @@ void BuiltinTypeFamilies::addToScope(NotNull<TypeArena> arena, NotNull<Scope> sc
|
||||||
TypeId t = arena->addType(GenericType{"T"});
|
TypeId t = arena->addType(GenericType{"T"});
|
||||||
TypeId u = arena->addType(GenericType{"U"});
|
TypeId u = arena->addType(GenericType{"U"});
|
||||||
GenericTypeDefinition genericT{t};
|
GenericTypeDefinition genericT{t};
|
||||||
GenericTypeDefinition genericU{u};
|
GenericTypeDefinition genericU{u, {t}};
|
||||||
|
|
||||||
return TypeFun{{genericT, genericU}, arena->addType(TypeFamilyInstanceType{NotNull{family}, {t, u}, {}})};
|
return TypeFun{{genericT, genericU}, arena->addType(TypeFamilyInstanceType{NotNull{family}, {t, u}, {}})};
|
||||||
};
|
};
|
||||||
|
|
|
@ -11,6 +11,7 @@
|
||||||
#include "Luau/VisitType.h"
|
#include "Luau/VisitType.h"
|
||||||
|
|
||||||
#include <iostream>
|
#include <iostream>
|
||||||
|
#include <optional>
|
||||||
#include <ostream>
|
#include <ostream>
|
||||||
|
|
||||||
namespace Luau
|
namespace Luau
|
||||||
|
@ -64,8 +65,9 @@ struct InstanceCollector2 : TypeOnceVisitor
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
TypeFamilyReductionGuesser::TypeFamilyReductionGuesser(NotNull<BuiltinTypes> builtins, NotNull<Normalizer> normalizer)
|
TypeFamilyReductionGuesser::TypeFamilyReductionGuesser(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins, NotNull<Normalizer> normalizer)
|
||||||
: builtins(builtins)
|
: arena(arena)
|
||||||
|
, builtins(builtins)
|
||||||
, normalizer(normalizer)
|
, normalizer(normalizer)
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
@ -87,6 +89,44 @@ void TypeFamilyReductionGuesser::dumpGuesses()
|
||||||
printf("Substitute %s for %s\n", toString(t).c_str(), toString(t_).c_str());
|
printf("Substitute %s for %s\n", toString(t).c_str(), toString(t_).c_str());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::optional<TypeId> TypeFamilyReductionGuesser::guess(TypeId typ)
|
||||||
|
{
|
||||||
|
std::optional<TypeId> guessedType = guessType(typ);
|
||||||
|
|
||||||
|
if (!guessedType.has_value())
|
||||||
|
return {};
|
||||||
|
|
||||||
|
TypeId guess = follow(*guessedType);
|
||||||
|
if (get<TypeFamilyInstanceType>(guess))
|
||||||
|
return {};
|
||||||
|
|
||||||
|
return guess;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<TypePackId> TypeFamilyReductionGuesser::guess(TypePackId tp)
|
||||||
|
{
|
||||||
|
auto [head, tail] = flatten(tp);
|
||||||
|
|
||||||
|
std::vector<TypeId> guessedHead;
|
||||||
|
guessedHead.reserve(head.size());
|
||||||
|
|
||||||
|
for (auto typ : head)
|
||||||
|
{
|
||||||
|
std::optional<TypeId> guessedType = guessType(typ);
|
||||||
|
|
||||||
|
if (!guessedType.has_value())
|
||||||
|
return {};
|
||||||
|
|
||||||
|
TypeId guess = follow(*guessedType);
|
||||||
|
if (get<TypeFamilyInstanceType>(guess))
|
||||||
|
return {};
|
||||||
|
|
||||||
|
guessedHead.push_back(*guessedType);
|
||||||
|
}
|
||||||
|
|
||||||
|
return arena->addTypePack(TypePack{guessedHead, tail});
|
||||||
|
}
|
||||||
|
|
||||||
TypeFamilyReductionGuessResult TypeFamilyReductionGuesser::guessTypeFamilyReductionForFunction(
|
TypeFamilyReductionGuessResult TypeFamilyReductionGuesser::guessTypeFamilyReductionForFunction(
|
||||||
const AstExprFunction& expr, const FunctionType* ftv, TypeId retTy)
|
const AstExprFunction& expr, const FunctionType* ftv, TypeId retTy)
|
||||||
{
|
{
|
||||||
|
|
|
@ -9,6 +9,8 @@
|
||||||
#include "Luau/TypeArena.h"
|
#include "Luau/TypeArena.h"
|
||||||
#include "Luau/TypeCheckLimits.h"
|
#include "Luau/TypeCheckLimits.h"
|
||||||
#include "Luau/TypeFamily.h"
|
#include "Luau/TypeFamily.h"
|
||||||
|
#include "Luau/TypeFwd.h"
|
||||||
|
#include "Luau/TypePack.h"
|
||||||
#include "Luau/TypeUtils.h"
|
#include "Luau/TypeUtils.h"
|
||||||
#include "Luau/VisitType.h"
|
#include "Luau/VisitType.h"
|
||||||
|
|
||||||
|
@ -101,6 +103,19 @@ bool Unifier2::unify(TypeId subTy, TypeId superTy)
|
||||||
if (subTy == superTy)
|
if (subTy == superTy)
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
|
// We have potentially done some unifications while dispatching either `SubtypeConstraint` or `PackSubtypeConstraint`,
|
||||||
|
// so rather than implementing backtracking or traversing the entire type graph multiple times, we could push
|
||||||
|
// additional constraints as we discover blocked types along with their proper bounds.
|
||||||
|
//
|
||||||
|
// But we exclude these two subtyping patterns, they are tautological:
|
||||||
|
// - never <: *blocked*
|
||||||
|
// - *blocked* <: unknown
|
||||||
|
if ((get<BlockedType>(subTy) || get<BlockedType>(superTy)) && !get<NeverType>(subTy) && !get<UnknownType>(superTy))
|
||||||
|
{
|
||||||
|
incompleteSubtypes.push_back(SubtypeConstraint{subTy, superTy});
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
FreeType* subFree = getMutable<FreeType>(subTy);
|
FreeType* subFree = getMutable<FreeType>(subTy);
|
||||||
FreeType* superFree = getMutable<FreeType>(superTy);
|
FreeType* superFree = getMutable<FreeType>(superTy);
|
||||||
|
|
||||||
|
@ -124,8 +139,7 @@ bool Unifier2::unify(TypeId subTy, TypeId superTy)
|
||||||
}
|
}
|
||||||
else if (subFree)
|
else if (subFree)
|
||||||
{
|
{
|
||||||
subFree->upperBound = mkIntersection(subFree->upperBound, superTy);
|
return unifyFreeWithType(subTy, superTy);
|
||||||
expandedFreeTypes[subTy].push_back(superTy);
|
|
||||||
}
|
}
|
||||||
else if (superFree)
|
else if (superFree)
|
||||||
{
|
{
|
||||||
|
@ -229,6 +243,62 @@ bool Unifier2::unify(TypeId subTy, TypeId superTy)
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// If superTy is a function and subTy already has a
|
||||||
|
// potentially-compatible function in its upper bound, we assume that
|
||||||
|
// the function is not overloaded and attempt to combine superTy into
|
||||||
|
// subTy's existing function bound.
|
||||||
|
bool Unifier2::unifyFreeWithType(TypeId subTy, TypeId superTy)
|
||||||
|
{
|
||||||
|
FreeType* subFree = getMutable<FreeType>(subTy);
|
||||||
|
LUAU_ASSERT(subFree);
|
||||||
|
|
||||||
|
auto doDefault = [&]() {
|
||||||
|
subFree->upperBound = mkIntersection(subFree->upperBound, superTy);
|
||||||
|
expandedFreeTypes[subTy].push_back(superTy);
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
|
||||||
|
TypeId upperBound = follow(subFree->upperBound);
|
||||||
|
|
||||||
|
if (get<FunctionType>(upperBound))
|
||||||
|
return unify(subFree->upperBound, superTy);
|
||||||
|
|
||||||
|
const FunctionType* superFunction = get<FunctionType>(superTy);
|
||||||
|
if (!superFunction)
|
||||||
|
return doDefault();
|
||||||
|
|
||||||
|
const auto [superArgHead, superArgTail] = flatten(superFunction->argTypes);
|
||||||
|
if (superArgTail)
|
||||||
|
return doDefault();
|
||||||
|
|
||||||
|
const IntersectionType* upperBoundIntersection = get<IntersectionType>(subFree->upperBound);
|
||||||
|
if (!upperBoundIntersection)
|
||||||
|
return doDefault();
|
||||||
|
|
||||||
|
bool ok = true;
|
||||||
|
bool foundOne = false;
|
||||||
|
|
||||||
|
for (TypeId part : upperBoundIntersection->parts)
|
||||||
|
{
|
||||||
|
const FunctionType* ft = get<FunctionType>(follow(part));
|
||||||
|
if (!ft)
|
||||||
|
continue;
|
||||||
|
|
||||||
|
const auto [subArgHead, subArgTail] = flatten(ft->argTypes);
|
||||||
|
|
||||||
|
if (!subArgTail && subArgHead.size() == superArgHead.size())
|
||||||
|
{
|
||||||
|
foundOne = true;
|
||||||
|
ok &= unify(part, superTy);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (foundOne)
|
||||||
|
return ok;
|
||||||
|
else
|
||||||
|
return doDefault();
|
||||||
|
}
|
||||||
|
|
||||||
bool Unifier2::unify(TypeId subTy, const FunctionType* superFn)
|
bool Unifier2::unify(TypeId subTy, const FunctionType* superFn)
|
||||||
{
|
{
|
||||||
const FunctionType* subFn = get<FunctionType>(subTy);
|
const FunctionType* subFn = get<FunctionType>(subTy);
|
||||||
|
@ -403,6 +473,12 @@ bool Unifier2::unify(TypePackId subTp, TypePackId superTp)
|
||||||
if (subTp == superTp)
|
if (subTp == superTp)
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
|
if (get<BlockedTypePack>(subTp) || get<BlockedTypePack>(superTp))
|
||||||
|
{
|
||||||
|
incompleteSubtypes.push_back(PackSubtypeConstraint{subTp, superTp});
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
const FreeTypePack* subFree = get<FreeTypePack>(subTp);
|
const FreeTypePack* subFree = get<FreeTypePack>(subTp);
|
||||||
const FreeTypePack* superFree = get<FreeTypePack>(superTp);
|
const FreeTypePack* superFree = get<FreeTypePack>(superTp);
|
||||||
|
|
||||||
|
|
|
@ -40,6 +40,26 @@ enum class CodeGenCompilationResult
|
||||||
AllocationFailed = 9, // Native codegen failed due to an allocation error
|
AllocationFailed = 9, // Native codegen failed due to an allocation error
|
||||||
};
|
};
|
||||||
|
|
||||||
|
struct ProtoCompilationFailure
|
||||||
|
{
|
||||||
|
CodeGenCompilationResult result = CodeGenCompilationResult::Success;
|
||||||
|
|
||||||
|
std::string debugname;
|
||||||
|
int line = -1;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct CompilationResult
|
||||||
|
{
|
||||||
|
CodeGenCompilationResult result = CodeGenCompilationResult::Success;
|
||||||
|
|
||||||
|
std::vector<ProtoCompilationFailure> protoFailures;
|
||||||
|
|
||||||
|
[[nodiscard]] bool hasErrors() const
|
||||||
|
{
|
||||||
|
return result != CodeGenCompilationResult::Success || !protoFailures.empty();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
struct CompilationStats
|
struct CompilationStats
|
||||||
{
|
{
|
||||||
size_t bytecodeSizeBytes = 0;
|
size_t bytecodeSizeBytes = 0;
|
||||||
|
@ -65,7 +85,8 @@ void create(lua_State* L);
|
||||||
void setNativeExecutionEnabled(lua_State* L, bool enabled);
|
void setNativeExecutionEnabled(lua_State* L, bool enabled);
|
||||||
|
|
||||||
// Builds target function and all inner functions
|
// Builds target function and all inner functions
|
||||||
CodeGenCompilationResult compile(lua_State* L, int idx, unsigned int flags = 0, CompilationStats* stats = nullptr);
|
CodeGenCompilationResult compile_DEPRECATED(lua_State* L, int idx, unsigned int flags = 0, CompilationStats* stats = nullptr);
|
||||||
|
CompilationResult compile(lua_State* L, int idx, unsigned int flags = 0, CompilationStats* stats = nullptr);
|
||||||
|
|
||||||
using AnnotatorFn = void (*)(void* context, std::string& result, int fid, int instpos);
|
using AnnotatorFn = void (*)(void* context, std::string& result, int fid, int instpos);
|
||||||
|
|
||||||
|
|
|
@ -41,6 +41,7 @@ struct NativeProtoExecDataDeleter
|
||||||
using NativeProtoExecDataPtr = std::unique_ptr<uint32_t[], NativeProtoExecDataDeleter>;
|
using NativeProtoExecDataPtr = std::unique_ptr<uint32_t[], NativeProtoExecDataDeleter>;
|
||||||
|
|
||||||
[[nodiscard]] NativeProtoExecDataPtr createNativeProtoExecData(uint32_t bytecodeInstructionCount);
|
[[nodiscard]] NativeProtoExecDataPtr createNativeProtoExecData(uint32_t bytecodeInstructionCount);
|
||||||
|
void destroyNativeProtoExecData(const uint32_t* instructionOffsets) noexcept;
|
||||||
|
|
||||||
[[nodiscard]] NativeProtoExecDataHeader& getNativeProtoExecDataHeader(uint32_t* instructionOffsets) noexcept;
|
[[nodiscard]] NativeProtoExecDataHeader& getNativeProtoExecDataHeader(uint32_t* instructionOffsets) noexcept;
|
||||||
[[nodiscard]] const NativeProtoExecDataHeader& getNativeProtoExecDataHeader(const uint32_t* instructionOffsets) noexcept;
|
[[nodiscard]] const NativeProtoExecDataHeader& getNativeProtoExecDataHeader(const uint32_t* instructionOffsets) noexcept;
|
||||||
|
|
|
@ -110,11 +110,15 @@ void* createBlockUnwindInfo(void* context, uint8_t* block, size_t blockSize, siz
|
||||||
unwind->finalize(unwindData, unwindSize, block, blockSize);
|
unwind->finalize(unwindData, unwindSize, block, blockSize);
|
||||||
|
|
||||||
#if defined(_WIN32) && defined(_M_X64)
|
#if defined(_WIN32) && defined(_M_X64)
|
||||||
|
|
||||||
|
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP | WINAPI_PARTITION_SYSTEM)
|
||||||
if (!RtlAddFunctionTable((RUNTIME_FUNCTION*)block, uint32_t(unwind->getFunctionCount()), uintptr_t(block)))
|
if (!RtlAddFunctionTable((RUNTIME_FUNCTION*)block, uint32_t(unwind->getFunctionCount()), uintptr_t(block)))
|
||||||
{
|
{
|
||||||
CODEGEN_ASSERT(!"Failed to allocate function table");
|
CODEGEN_ASSERT(!"Failed to allocate function table");
|
||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
#elif defined(__linux__) || defined(__APPLE__)
|
#elif defined(__linux__) || defined(__APPLE__)
|
||||||
if (!__register_frame)
|
if (!__register_frame)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
|
@ -138,8 +142,12 @@ void* createBlockUnwindInfo(void* context, uint8_t* block, size_t blockSize, siz
|
||||||
void destroyBlockUnwindInfo(void* context, void* unwindData)
|
void destroyBlockUnwindInfo(void* context, void* unwindData)
|
||||||
{
|
{
|
||||||
#if defined(_WIN32) && defined(_M_X64)
|
#if defined(_WIN32) && defined(_M_X64)
|
||||||
|
|
||||||
|
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP | WINAPI_PARTITION_SYSTEM)
|
||||||
if (!RtlDeleteFunctionTable((RUNTIME_FUNCTION*)unwindData))
|
if (!RtlDeleteFunctionTable((RUNTIME_FUNCTION*)unwindData))
|
||||||
CODEGEN_ASSERT(!"Failed to deallocate function table");
|
CODEGEN_ASSERT(!"Failed to deallocate function table");
|
||||||
|
#endif
|
||||||
|
|
||||||
#elif defined(__linux__) || defined(__APPLE__)
|
#elif defined(__linux__) || defined(__APPLE__)
|
||||||
if (!__deregister_frame)
|
if (!__deregister_frame)
|
||||||
{
|
{
|
||||||
|
|
|
@ -43,6 +43,7 @@
|
||||||
LUAU_FASTFLAGVARIABLE(DebugCodegenNoOpt, false)
|
LUAU_FASTFLAGVARIABLE(DebugCodegenNoOpt, false)
|
||||||
LUAU_FASTFLAGVARIABLE(DebugCodegenOptSize, false)
|
LUAU_FASTFLAGVARIABLE(DebugCodegenOptSize, false)
|
||||||
LUAU_FASTFLAGVARIABLE(DebugCodegenSkipNumbering, false)
|
LUAU_FASTFLAGVARIABLE(DebugCodegenSkipNumbering, false)
|
||||||
|
LUAU_FASTFLAGVARIABLE(LuauCodegenDetailedCompilationResult, false)
|
||||||
|
|
||||||
// Per-module IR instruction count limit
|
// Per-module IR instruction count limit
|
||||||
LUAU_FASTINTVARIABLE(CodegenHeuristicsInstructionLimit, 1'048'576) // 1 M
|
LUAU_FASTINTVARIABLE(CodegenHeuristicsInstructionLimit, 1'048'576) // 1 M
|
||||||
|
@ -284,7 +285,7 @@ void onDisable(lua_State* L, Proto* proto)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
size_t getMemorySize(lua_State* L, Proto* proto)
|
static size_t getMemorySize(lua_State* L, Proto* proto)
|
||||||
{
|
{
|
||||||
CODEGEN_ASSERT(FFlag::LuauCodegenHeapSizeReport);
|
CODEGEN_ASSERT(FFlag::LuauCodegenHeapSizeReport);
|
||||||
ExtraExecData* extra = getExtraExecData(proto, proto->execdata);
|
ExtraExecData* extra = getExtraExecData(proto, proto->execdata);
|
||||||
|
@ -410,8 +411,10 @@ void setNativeExecutionEnabled(lua_State* L, bool enabled)
|
||||||
L->global->ecb.enter = enabled ? onEnter : onEnterDisabled;
|
L->global->ecb.enter = enabled ? onEnter : onEnterDisabled;
|
||||||
}
|
}
|
||||||
|
|
||||||
CodeGenCompilationResult compile(lua_State* L, int idx, unsigned int flags, CompilationStats* stats)
|
CodeGenCompilationResult compile_DEPRECATED(lua_State* L, int idx, unsigned int flags, CompilationStats* stats)
|
||||||
{
|
{
|
||||||
|
CODEGEN_ASSERT(!FFlag::LuauCodegenDetailedCompilationResult);
|
||||||
|
|
||||||
CODEGEN_ASSERT(lua_isLfunction(L, idx));
|
CODEGEN_ASSERT(lua_isLfunction(L, idx));
|
||||||
const TValue* func = luaA_toobject(L, idx);
|
const TValue* func = luaA_toobject(L, idx);
|
||||||
|
|
||||||
|
@ -564,6 +567,167 @@ CodeGenCompilationResult compile(lua_State* L, int idx, unsigned int flags, Comp
|
||||||
return codeGenCompilationResult;
|
return codeGenCompilationResult;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
CompilationResult compile(lua_State* L, int idx, unsigned int flags, CompilationStats* stats)
|
||||||
|
{
|
||||||
|
CODEGEN_ASSERT(FFlag::LuauCodegenDetailedCompilationResult);
|
||||||
|
|
||||||
|
CompilationResult compilationResult;
|
||||||
|
|
||||||
|
CODEGEN_ASSERT(lua_isLfunction(L, idx));
|
||||||
|
const TValue* func = luaA_toobject(L, idx);
|
||||||
|
|
||||||
|
Proto* root = clvalue(func)->l.p;
|
||||||
|
|
||||||
|
if ((flags & CodeGen_OnlyNativeModules) != 0 && (root->flags & LPF_NATIVE_MODULE) == 0)
|
||||||
|
{
|
||||||
|
compilationResult.result = CodeGenCompilationResult::NotNativeModule;
|
||||||
|
return compilationResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If initialization has failed, do not compile any functions
|
||||||
|
NativeState* data = getNativeState(L);
|
||||||
|
if (!data)
|
||||||
|
{
|
||||||
|
compilationResult.result = CodeGenCompilationResult::CodeGenNotInitialized;
|
||||||
|
return compilationResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<Proto*> protos;
|
||||||
|
gatherFunctions(protos, root, flags);
|
||||||
|
|
||||||
|
// Skip protos that have been compiled during previous invocations of CodeGen::compile
|
||||||
|
protos.erase(std::remove_if(protos.begin(), protos.end(),
|
||||||
|
[](Proto* p) {
|
||||||
|
return p == nullptr || p->execdata != nullptr;
|
||||||
|
}),
|
||||||
|
protos.end());
|
||||||
|
|
||||||
|
if (protos.empty())
|
||||||
|
{
|
||||||
|
compilationResult.result = CodeGenCompilationResult::NothingToCompile;
|
||||||
|
return compilationResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (stats != nullptr)
|
||||||
|
stats->functionsTotal = uint32_t(protos.size());
|
||||||
|
|
||||||
|
#if defined(__aarch64__)
|
||||||
|
static unsigned int cpuFeatures = getCpuFeaturesA64();
|
||||||
|
A64::AssemblyBuilderA64 build(/* logText= */ false, cpuFeatures);
|
||||||
|
#else
|
||||||
|
X64::AssemblyBuilderX64 build(/* logText= */ false);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
ModuleHelpers helpers;
|
||||||
|
#if defined(__aarch64__)
|
||||||
|
A64::assembleHelpers(build, helpers);
|
||||||
|
#else
|
||||||
|
X64::assembleHelpers(build, helpers);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
std::vector<OldNativeProto> results;
|
||||||
|
results.reserve(protos.size());
|
||||||
|
|
||||||
|
uint32_t totalIrInstCount = 0;
|
||||||
|
|
||||||
|
for (Proto* p : protos)
|
||||||
|
{
|
||||||
|
CodeGenCompilationResult protoResult = CodeGenCompilationResult::Success;
|
||||||
|
|
||||||
|
if (std::optional<OldNativeProto> np = createNativeFunction(build, helpers, p, totalIrInstCount, protoResult))
|
||||||
|
results.push_back(*np);
|
||||||
|
else
|
||||||
|
compilationResult.protoFailures.push_back({protoResult, p->debugname ? getstr(p->debugname) : "", p->linedefined});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Very large modules might result in overflowing a jump offset; in this case we currently abandon the entire module
|
||||||
|
if (!build.finalize())
|
||||||
|
{
|
||||||
|
for (OldNativeProto result : results)
|
||||||
|
destroyExecData(result.execdata);
|
||||||
|
|
||||||
|
compilationResult.result = CodeGenCompilationResult::CodeGenAssemblerFinalizationFailure;
|
||||||
|
return compilationResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no functions were assembled, we don't need to allocate/copy executable pages for helpers
|
||||||
|
if (results.empty())
|
||||||
|
return compilationResult;
|
||||||
|
|
||||||
|
uint8_t* nativeData = nullptr;
|
||||||
|
size_t sizeNativeData = 0;
|
||||||
|
uint8_t* codeStart = nullptr;
|
||||||
|
if (!data->codeAllocator.allocate(build.data.data(), int(build.data.size()), reinterpret_cast<const uint8_t*>(build.code.data()),
|
||||||
|
int(build.code.size() * sizeof(build.code[0])), nativeData, sizeNativeData, codeStart))
|
||||||
|
{
|
||||||
|
for (OldNativeProto result : results)
|
||||||
|
destroyExecData(result.execdata);
|
||||||
|
|
||||||
|
compilationResult.result = CodeGenCompilationResult::AllocationFailed;
|
||||||
|
return compilationResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (FFlag::LuauCodegenHeapSizeReport)
|
||||||
|
{
|
||||||
|
if (gPerfLogFn && results.size() > 0)
|
||||||
|
gPerfLogFn(gPerfLogContext, uintptr_t(codeStart), uint32_t(results[0].exectarget), "<luau helpers>");
|
||||||
|
|
||||||
|
for (size_t i = 0; i < results.size(); ++i)
|
||||||
|
{
|
||||||
|
uint32_t begin = uint32_t(results[i].exectarget);
|
||||||
|
uint32_t end = i + 1 < results.size() ? uint32_t(results[i + 1].exectarget) : uint32_t(build.code.size() * sizeof(build.code[0]));
|
||||||
|
CODEGEN_ASSERT(begin < end);
|
||||||
|
|
||||||
|
if (gPerfLogFn)
|
||||||
|
logPerfFunction(results[i].p, uintptr_t(codeStart) + begin, end - begin);
|
||||||
|
|
||||||
|
ExtraExecData* extra = getExtraExecData(results[i].p, results[i].execdata);
|
||||||
|
extra->codeSize = end - begin;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if (gPerfLogFn && results.size() > 0)
|
||||||
|
{
|
||||||
|
gPerfLogFn(gPerfLogContext, uintptr_t(codeStart), uint32_t(results[0].exectarget), "<luau helpers>");
|
||||||
|
|
||||||
|
for (size_t i = 0; i < results.size(); ++i)
|
||||||
|
{
|
||||||
|
uint32_t begin = uint32_t(results[i].exectarget);
|
||||||
|
uint32_t end = i + 1 < results.size() ? uint32_t(results[i + 1].exectarget) : uint32_t(build.code.size() * sizeof(build.code[0]));
|
||||||
|
CODEGEN_ASSERT(begin < end);
|
||||||
|
|
||||||
|
logPerfFunction(results[i].p, uintptr_t(codeStart) + begin, end - begin);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const OldNativeProto& result : results)
|
||||||
|
{
|
||||||
|
// the memory is now managed by VM and will be freed via onDestroyFunction
|
||||||
|
result.p->execdata = result.execdata;
|
||||||
|
result.p->exectarget = uintptr_t(codeStart) + result.exectarget;
|
||||||
|
result.p->codeentry = &kCodeEntryInsn;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (stats != nullptr)
|
||||||
|
{
|
||||||
|
for (const OldNativeProto& result : results)
|
||||||
|
{
|
||||||
|
stats->bytecodeSizeBytes += result.p->sizecode * sizeof(Instruction);
|
||||||
|
|
||||||
|
// Account for the native -> bytecode instruction offsets mapping:
|
||||||
|
stats->nativeMetadataSizeBytes += result.p->sizecode * sizeof(uint32_t);
|
||||||
|
}
|
||||||
|
|
||||||
|
stats->functionsCompiled += uint32_t(results.size());
|
||||||
|
stats->nativeCodeSizeBytes += build.code.size();
|
||||||
|
stats->nativeDataSizeBytes += build.data.size();
|
||||||
|
}
|
||||||
|
|
||||||
|
return compilationResult;
|
||||||
|
}
|
||||||
|
|
||||||
void setPerfLog(void* context, PerfLogFn logFn)
|
void setPerfLog(void* context, PerfLogFn logFn)
|
||||||
{
|
{
|
||||||
gPerfLogContext = context;
|
gPerfLogContext = context;
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
#include "Luau/UnwindBuilder.h"
|
#include "Luau/UnwindBuilder.h"
|
||||||
|
|
||||||
#include "BitUtils.h"
|
#include "BitUtils.h"
|
||||||
|
#include "CodeGenContext.h"
|
||||||
#include "CodeGenUtils.h"
|
#include "CodeGenUtils.h"
|
||||||
#include "NativeState.h"
|
#include "NativeState.h"
|
||||||
#include "EmitCommonA64.h"
|
#include "EmitCommonA64.h"
|
||||||
|
@ -290,6 +291,39 @@ bool initHeaderFunctions(NativeState& data)
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
bool initHeaderFunctions(BaseCodeGenContext& codeGenContext)
|
||||||
|
{
|
||||||
|
AssemblyBuilderA64 build(/* logText= */ false);
|
||||||
|
UnwindBuilder& unwind = *codeGenContext.unwindBuilder.get();
|
||||||
|
|
||||||
|
unwind.startInfo(UnwindBuilder::A64);
|
||||||
|
|
||||||
|
EntryLocations entryLocations = buildEntryFunction(build, unwind);
|
||||||
|
|
||||||
|
build.finalize();
|
||||||
|
|
||||||
|
unwind.finishInfo();
|
||||||
|
|
||||||
|
CODEGEN_ASSERT(build.data.empty());
|
||||||
|
|
||||||
|
uint8_t* codeStart = nullptr;
|
||||||
|
if (!codeGenContext.codeAllocator.allocate(build.data.data(), int(build.data.size()), reinterpret_cast<const uint8_t*>(build.code.data()),
|
||||||
|
int(build.code.size() * sizeof(build.code[0])), codeGenContext.gateData, codeGenContext.gateDataSize, codeStart))
|
||||||
|
{
|
||||||
|
CODEGEN_ASSERT(!"Failed to create entry function");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set the offset at the begining so that functions in new blocks will not overlay the locations
|
||||||
|
// specified by the unwind information of the entry function
|
||||||
|
unwind.setBeginOffset(build.getLabelOffset(entryLocations.prologueEnd));
|
||||||
|
|
||||||
|
codeGenContext.context.gateEntry = codeStart + build.getLabelOffset(entryLocations.start);
|
||||||
|
codeGenContext.context.gateExit = codeStart + build.getLabelOffset(entryLocations.epilogueStart);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
void assembleHelpers(AssemblyBuilderA64& build, ModuleHelpers& helpers)
|
void assembleHelpers(AssemblyBuilderA64& build, ModuleHelpers& helpers)
|
||||||
{
|
{
|
||||||
if (build.logText)
|
if (build.logText)
|
||||||
|
|
|
@ -6,6 +6,7 @@ namespace Luau
|
||||||
namespace CodeGen
|
namespace CodeGen
|
||||||
{
|
{
|
||||||
|
|
||||||
|
class BaseCodeGenContext;
|
||||||
struct NativeState;
|
struct NativeState;
|
||||||
struct ModuleHelpers;
|
struct ModuleHelpers;
|
||||||
|
|
||||||
|
@ -15,6 +16,7 @@ namespace A64
|
||||||
class AssemblyBuilderA64;
|
class AssemblyBuilderA64;
|
||||||
|
|
||||||
bool initHeaderFunctions(NativeState& data);
|
bool initHeaderFunctions(NativeState& data);
|
||||||
|
bool initHeaderFunctions(BaseCodeGenContext& codeGenContext);
|
||||||
void assembleHelpers(AssemblyBuilderA64& build, ModuleHelpers& helpers);
|
void assembleHelpers(AssemblyBuilderA64& build, ModuleHelpers& helpers);
|
||||||
|
|
||||||
} // namespace A64
|
} // namespace A64
|
||||||
|
|
226
CodeGen/src/CodeGenContext.cpp
Normal file
226
CodeGen/src/CodeGenContext.cpp
Normal file
|
@ -0,0 +1,226 @@
|
||||||
|
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||||
|
#include "CodeGenContext.h"
|
||||||
|
|
||||||
|
#include "CodeGenA64.h"
|
||||||
|
#include "CodeGenX64.h"
|
||||||
|
|
||||||
|
#include "Luau/CodeBlockUnwind.h"
|
||||||
|
#include "Luau/UnwindBuilder.h"
|
||||||
|
#include "Luau/UnwindBuilderDwarf2.h"
|
||||||
|
#include "Luau/UnwindBuilderWin.h"
|
||||||
|
|
||||||
|
LUAU_FASTFLAG(LuauCodegenHeapSizeReport)
|
||||||
|
|
||||||
|
LUAU_FASTINT(LuauCodeGenBlockSize)
|
||||||
|
LUAU_FASTINT(LuauCodeGenMaxTotalSize)
|
||||||
|
|
||||||
|
namespace Luau
|
||||||
|
{
|
||||||
|
namespace CodeGen
|
||||||
|
{
|
||||||
|
|
||||||
|
// From CodeGen.cpp
|
||||||
|
extern void* gPerfLogContext;
|
||||||
|
extern PerfLogFn gPerfLogFn;
|
||||||
|
|
||||||
|
BaseCodeGenContext::BaseCodeGenContext(size_t blockSize, size_t maxTotalSize, AllocationCallback* allocationCallback, void* allocationCallbackContext)
|
||||||
|
: codeAllocator{blockSize, maxTotalSize, allocationCallback, allocationCallbackContext}
|
||||||
|
{
|
||||||
|
CODEGEN_ASSERT(isSupported());
|
||||||
|
|
||||||
|
#if defined(_WIN32)
|
||||||
|
unwindBuilder = std::make_unique<UnwindBuilderWin>();
|
||||||
|
#else
|
||||||
|
unwindBuilder = std::make_unique<UnwindBuilderDwarf2>();
|
||||||
|
#endif
|
||||||
|
|
||||||
|
codeAllocator.context = unwindBuilder.get();
|
||||||
|
codeAllocator.createBlockUnwindInfo = createBlockUnwindInfo;
|
||||||
|
codeAllocator.destroyBlockUnwindInfo = destroyBlockUnwindInfo;
|
||||||
|
|
||||||
|
initFunctions(context);
|
||||||
|
}
|
||||||
|
|
||||||
|
[[nodiscard]] bool BaseCodeGenContext::initHeaderFunctions()
|
||||||
|
{
|
||||||
|
#if defined(__x86_64__) || defined(_M_X64)
|
||||||
|
if (!X64::initHeaderFunctions(*this))
|
||||||
|
return false;
|
||||||
|
#elif defined(__aarch64__)
|
||||||
|
if (!A64::initHeaderFunctions(*this))
|
||||||
|
return false;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
if (gPerfLogFn)
|
||||||
|
gPerfLogFn(gPerfLogContext, uintptr_t(context.gateEntry), 4096, "<luau gate>");
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
StandaloneCodeGenContext::StandaloneCodeGenContext(
|
||||||
|
size_t blockSize, size_t maxTotalSize, AllocationCallback* allocationCallback, void* allocationCallbackContext)
|
||||||
|
: BaseCodeGenContext{blockSize, maxTotalSize, allocationCallback, allocationCallbackContext}
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
void StandaloneCodeGenContext::compileOrBindModule(const ModuleId&, lua_State*, int, unsigned int, CompilationStats*) {}
|
||||||
|
|
||||||
|
void StandaloneCodeGenContext::onCloseState() noexcept
|
||||||
|
{
|
||||||
|
// The StandaloneCodeGenContext is owned by the one VM that owns it, so when
|
||||||
|
// that VM is destroyed, we destroy *this as well:
|
||||||
|
delete this;
|
||||||
|
}
|
||||||
|
|
||||||
|
void StandaloneCodeGenContext::onDestroyFunction(void* execdata) noexcept
|
||||||
|
{
|
||||||
|
destroyNativeProtoExecData(static_cast<uint32_t*>(execdata));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
SharedCodeGenContext::SharedCodeGenContext(
|
||||||
|
size_t blockSize, size_t maxTotalSize, AllocationCallback* allocationCallback, void* allocationCallbackContext)
|
||||||
|
: BaseCodeGenContext{blockSize, maxTotalSize, allocationCallback, allocationCallbackContext}
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
void SharedCodeGenContext::compileOrBindModule(const ModuleId&, lua_State*, int, unsigned int, CompilationStats*) {}
|
||||||
|
|
||||||
|
void SharedCodeGenContext::onCloseState() noexcept
|
||||||
|
{
|
||||||
|
// The lifetime of the SharedCodeGenContext is managed separately from the
|
||||||
|
// VMs that use it. When a VM is destroyed, we don't need to do anything
|
||||||
|
// here.
|
||||||
|
}
|
||||||
|
|
||||||
|
void SharedCodeGenContext::onDestroyFunction(void* execdata) noexcept
|
||||||
|
{
|
||||||
|
getNativeProtoExecDataHeader(static_cast<const uint32_t*>(execdata)).nativeModule->release();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
[[nodiscard]] UniqueSharedCodeGenContext createSharedCodeGenContext()
|
||||||
|
{
|
||||||
|
return createSharedCodeGenContext(size_t(FInt::LuauCodeGenBlockSize), size_t(FInt::LuauCodeGenMaxTotalSize), nullptr, nullptr);
|
||||||
|
}
|
||||||
|
|
||||||
|
[[nodiscard]] UniqueSharedCodeGenContext createSharedCodeGenContext(AllocationCallback* allocationCallback, void* allocationCallbackContext)
|
||||||
|
{
|
||||||
|
return createSharedCodeGenContext(
|
||||||
|
size_t(FInt::LuauCodeGenBlockSize), size_t(FInt::LuauCodeGenMaxTotalSize), allocationCallback, allocationCallbackContext);
|
||||||
|
}
|
||||||
|
|
||||||
|
[[nodiscard]] UniqueSharedCodeGenContext createSharedCodeGenContext(
|
||||||
|
size_t blockSize, size_t maxTotalSize, AllocationCallback* allocationCallback, void* allocationCallbackContext)
|
||||||
|
{
|
||||||
|
UniqueSharedCodeGenContext codeGenContext{new SharedCodeGenContext{blockSize, maxTotalSize, nullptr, nullptr}};
|
||||||
|
|
||||||
|
if (!codeGenContext->initHeaderFunctions())
|
||||||
|
return {};
|
||||||
|
|
||||||
|
return codeGenContext;
|
||||||
|
}
|
||||||
|
|
||||||
|
void destroySharedCodeGenContext(const SharedCodeGenContext* codeGenContext) noexcept
|
||||||
|
{
|
||||||
|
delete codeGenContext;
|
||||||
|
}
|
||||||
|
|
||||||
|
void SharedCodeGenContextDeleter::operator()(const SharedCodeGenContext* codeGenContext) const noexcept
|
||||||
|
{
|
||||||
|
destroySharedCodeGenContext(codeGenContext);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
[[nodiscard]] static BaseCodeGenContext* getCodeGenContext(lua_State* L) noexcept
|
||||||
|
{
|
||||||
|
return static_cast<BaseCodeGenContext*>(L->global->ecb.context);
|
||||||
|
}
|
||||||
|
|
||||||
|
static void onCloseState(lua_State* L) noexcept
|
||||||
|
{
|
||||||
|
getCodeGenContext(L)->onCloseState();
|
||||||
|
L->global->ecb = lua_ExecutionCallbacks{};
|
||||||
|
}
|
||||||
|
|
||||||
|
static void onDestroyFunction(lua_State* L, Proto* proto) noexcept
|
||||||
|
{
|
||||||
|
getCodeGenContext(L)->onDestroyFunction(proto->execdata);
|
||||||
|
proto->execdata = nullptr;
|
||||||
|
proto->exectarget = 0;
|
||||||
|
proto->codeentry = proto->code;
|
||||||
|
}
|
||||||
|
|
||||||
|
static int onEnter(lua_State* L, Proto* proto)
|
||||||
|
{
|
||||||
|
BaseCodeGenContext* codeGenContext = getCodeGenContext(L);
|
||||||
|
|
||||||
|
CODEGEN_ASSERT(proto->execdata);
|
||||||
|
CODEGEN_ASSERT(L->ci->savedpc >= proto->code && L->ci->savedpc < proto->code + proto->sizecode);
|
||||||
|
|
||||||
|
uintptr_t target = proto->exectarget + static_cast<uint32_t*>(proto->execdata)[L->ci->savedpc - proto->code];
|
||||||
|
|
||||||
|
// Returns 1 to finish the function in the VM
|
||||||
|
return GateFn(codeGenContext->context.gateEntry)(L, proto, target, &codeGenContext->context);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Defined in CodeGen.cpp
|
||||||
|
void onDisable(lua_State* L, Proto* proto);
|
||||||
|
|
||||||
|
static size_t getMemorySize(lua_State* L, Proto* proto)
|
||||||
|
{
|
||||||
|
CODEGEN_ASSERT(FFlag::LuauCodegenHeapSizeReport);
|
||||||
|
|
||||||
|
const NativeProtoExecDataHeader& execDataHeader = getNativeProtoExecDataHeader(static_cast<const uint32_t*>(proto->execdata));
|
||||||
|
|
||||||
|
const size_t execDataSize = sizeof(NativeProtoExecDataHeader) + execDataHeader.bytecodeInstructionCount * sizeof(Instruction);
|
||||||
|
|
||||||
|
// While execDataSize is exactly the size of the allocation we made and hold for 'execdata' field, the code size is approximate
|
||||||
|
// This is because code+data page is shared and owned by all Proto from a single module and each one can keep the whole region alive
|
||||||
|
// So individual Proto being freed by GC will not reflect memory use by native code correctly
|
||||||
|
return execDataSize + execDataHeader.nativeCodeSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
static void initializeExecutionCallbacks(lua_State* L, BaseCodeGenContext* codeGenContext) noexcept
|
||||||
|
{
|
||||||
|
lua_ExecutionCallbacks* ecb = &L->global->ecb;
|
||||||
|
|
||||||
|
ecb->context = codeGenContext;
|
||||||
|
ecb->close = onCloseState;
|
||||||
|
ecb->destroy = onDestroyFunction;
|
||||||
|
ecb->enter = onEnter;
|
||||||
|
ecb->disable = onDisable;
|
||||||
|
|
||||||
|
if (FFlag::LuauCodegenHeapSizeReport)
|
||||||
|
ecb->getmemorysize = getMemorySize;
|
||||||
|
}
|
||||||
|
|
||||||
|
void create_NEW(lua_State* L)
|
||||||
|
{
|
||||||
|
return create_NEW(L, size_t(FInt::LuauCodeGenBlockSize), size_t(FInt::LuauCodeGenMaxTotalSize), nullptr, nullptr);
|
||||||
|
}
|
||||||
|
|
||||||
|
void create_NEW(lua_State* L, AllocationCallback* allocationCallback, void* allocationCallbackContext)
|
||||||
|
{
|
||||||
|
return create_NEW(L, size_t(FInt::LuauCodeGenBlockSize), size_t(FInt::LuauCodeGenMaxTotalSize), allocationCallback, allocationCallbackContext);
|
||||||
|
}
|
||||||
|
|
||||||
|
void create_NEW(lua_State* L, size_t blockSize, size_t maxTotalSize, AllocationCallback* allocationCallback, void* allocationCallbackContext)
|
||||||
|
{
|
||||||
|
std::unique_ptr<StandaloneCodeGenContext> codeGenContext =
|
||||||
|
std::make_unique<StandaloneCodeGenContext>(blockSize, maxTotalSize, allocationCallback, allocationCallbackContext);
|
||||||
|
|
||||||
|
if (!codeGenContext->initHeaderFunctions())
|
||||||
|
return;
|
||||||
|
|
||||||
|
initializeExecutionCallbacks(L, codeGenContext.release());
|
||||||
|
}
|
||||||
|
|
||||||
|
void create_NEW(lua_State* L, SharedCodeGenContext* codeGenContext)
|
||||||
|
{
|
||||||
|
initializeExecutionCallbacks(L, codeGenContext);
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace CodeGen
|
||||||
|
} // namespace Luau
|
114
CodeGen/src/CodeGenContext.h
Normal file
114
CodeGen/src/CodeGenContext.h
Normal file
|
@ -0,0 +1,114 @@
|
||||||
|
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include "Luau/SharedCodeAllocator.h"
|
||||||
|
|
||||||
|
#include "NativeState.h"
|
||||||
|
|
||||||
|
#include <memory>
|
||||||
|
#include <stdint.h>
|
||||||
|
|
||||||
|
namespace Luau
|
||||||
|
{
|
||||||
|
namespace CodeGen
|
||||||
|
{
|
||||||
|
|
||||||
|
// The "code-gen context" maintains the native code-gen state. There are two
|
||||||
|
// implementations. The StandaloneCodeGenContext is a VM-specific context type.
|
||||||
|
// It is the "simple" implementation that can be used when native code-gen is
|
||||||
|
// used with a single Luau VM. The SharedCodeGenContext supports use from
|
||||||
|
// multiple Luau VMs concurrently, and allows for sharing of executable native
|
||||||
|
// code and related metadata.
|
||||||
|
|
||||||
|
class BaseCodeGenContext
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
BaseCodeGenContext(size_t blockSize, size_t maxTotalSize, AllocationCallback* allocationCallback, void* allocationCallbackContext);
|
||||||
|
|
||||||
|
[[nodiscard]] bool initHeaderFunctions();
|
||||||
|
|
||||||
|
virtual void compileOrBindModule(const ModuleId& moduleId, lua_State* L, int idx, unsigned int flags, CompilationStats* stats) = 0;
|
||||||
|
|
||||||
|
virtual void onCloseState() noexcept = 0;
|
||||||
|
virtual void onDestroyFunction(void* execdata) noexcept = 0;
|
||||||
|
|
||||||
|
CodeAllocator codeAllocator;
|
||||||
|
std::unique_ptr<UnwindBuilder> unwindBuilder;
|
||||||
|
|
||||||
|
uint8_t* gateData = nullptr;
|
||||||
|
size_t gateDataSize = 0;
|
||||||
|
|
||||||
|
NativeContext context;
|
||||||
|
};
|
||||||
|
|
||||||
|
class StandaloneCodeGenContext final : public BaseCodeGenContext
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
StandaloneCodeGenContext(size_t blockSize, size_t maxTotalSize, AllocationCallback* allocationCallback, void* allocationCallbackContext);
|
||||||
|
|
||||||
|
virtual void compileOrBindModule(const ModuleId& moduleId, lua_State* L, int idx, unsigned int flags, CompilationStats* stats) override;
|
||||||
|
|
||||||
|
virtual void onCloseState() noexcept override;
|
||||||
|
virtual void onDestroyFunction(void* execdata) noexcept override;
|
||||||
|
|
||||||
|
private:
|
||||||
|
};
|
||||||
|
|
||||||
|
class SharedCodeGenContext final : public BaseCodeGenContext
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
SharedCodeGenContext(size_t blockSize, size_t maxTotalSize, AllocationCallback* allocationCallback, void* allocationCallbackContext);
|
||||||
|
|
||||||
|
virtual void compileOrBindModule(const ModuleId& moduleId, lua_State* L, int idx, unsigned int flags, CompilationStats* stats) override;
|
||||||
|
|
||||||
|
virtual void onCloseState() noexcept override;
|
||||||
|
virtual void onDestroyFunction(void* execdata) noexcept override;
|
||||||
|
|
||||||
|
private:
|
||||||
|
SharedCodeAllocator sharedAllocator;
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
// The following will become the public interface, and can be moved into
|
||||||
|
// CodeGen.h after the shared allocator work is complete. When the old
|
||||||
|
// implementation is removed, the _NEW suffix can be dropped from these
|
||||||
|
// functions.
|
||||||
|
|
||||||
|
class SharedCodeGenContext;
|
||||||
|
|
||||||
|
struct SharedCodeGenContextDeleter
|
||||||
|
{
|
||||||
|
void operator()(const SharedCodeGenContext* context) const noexcept;
|
||||||
|
};
|
||||||
|
|
||||||
|
using UniqueSharedCodeGenContext = std::unique_ptr<SharedCodeGenContext, SharedCodeGenContextDeleter>;
|
||||||
|
|
||||||
|
// Creates a new SharedCodeGenContext that can be used by multiple Luau VMs
|
||||||
|
// concurrently, using either the default allocator parameters or custom
|
||||||
|
// allocator parameters.
|
||||||
|
[[nodiscard]] UniqueSharedCodeGenContext createSharedCodeGenContext();
|
||||||
|
|
||||||
|
[[nodiscard]] UniqueSharedCodeGenContext createSharedCodeGenContext(AllocationCallback* allocationCallback, void* allocationCallbackContext);
|
||||||
|
|
||||||
|
[[nodiscard]] UniqueSharedCodeGenContext createSharedCodeGenContext(
|
||||||
|
size_t blockSize, size_t maxTotalSize, AllocationCallback* allocationCallback, void* allocationCallbackContext);
|
||||||
|
|
||||||
|
// Destroys the provided SharedCodeGenContext. All Luau VMs using the
|
||||||
|
// SharedCodeGenContext must be destroyed before this function is called.
|
||||||
|
void destroySharedCodeGenContext(const SharedCodeGenContext* codeGenContext) noexcept;
|
||||||
|
|
||||||
|
// Initializes native code-gen on the provided Luau VM, using a VM-specific
|
||||||
|
// code-gen context and either the default allocator parameters or custom
|
||||||
|
// allocator parameters.
|
||||||
|
void create_NEW(lua_State* L);
|
||||||
|
void create_NEW(lua_State* L, AllocationCallback* allocationCallback, void* allocationCallbackContext);
|
||||||
|
void create_NEW(lua_State* L, size_t blockSize, size_t maxTotalSize, AllocationCallback* allocationCallback, void* allocationCallbackContext);
|
||||||
|
|
||||||
|
// Initializes native code-gen on the provided Luau VM, using the provided
|
||||||
|
// SharedCodeGenContext. Note that after this function is called, the
|
||||||
|
// SharedCodeGenContext must not be destroyed until after the Luau VM L is
|
||||||
|
// destroyed via lua_close.
|
||||||
|
void create_NEW(lua_State* L, SharedCodeGenContext* codeGenContext);
|
||||||
|
|
||||||
|
} // namespace CodeGen
|
||||||
|
} // namespace Luau
|
|
@ -4,6 +4,7 @@
|
||||||
#include "Luau/AssemblyBuilderX64.h"
|
#include "Luau/AssemblyBuilderX64.h"
|
||||||
#include "Luau/UnwindBuilder.h"
|
#include "Luau/UnwindBuilder.h"
|
||||||
|
|
||||||
|
#include "CodeGenContext.h"
|
||||||
#include "NativeState.h"
|
#include "NativeState.h"
|
||||||
#include "EmitCommonX64.h"
|
#include "EmitCommonX64.h"
|
||||||
|
|
||||||
|
@ -218,6 +219,39 @@ bool initHeaderFunctions(NativeState& data)
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
bool initHeaderFunctions(BaseCodeGenContext& codeGenContext)
|
||||||
|
{
|
||||||
|
AssemblyBuilderX64 build(/* logText= */ false);
|
||||||
|
UnwindBuilder& unwind = *codeGenContext.unwindBuilder.get();
|
||||||
|
|
||||||
|
unwind.startInfo(UnwindBuilder::X64);
|
||||||
|
|
||||||
|
EntryLocations entryLocations = buildEntryFunction(build, unwind);
|
||||||
|
|
||||||
|
build.finalize();
|
||||||
|
|
||||||
|
unwind.finishInfo();
|
||||||
|
|
||||||
|
CODEGEN_ASSERT(build.data.empty());
|
||||||
|
|
||||||
|
uint8_t* codeStart = nullptr;
|
||||||
|
if (!codeGenContext.codeAllocator.allocate(build.data.data(), int(build.data.size()), build.code.data(), int(build.code.size()),
|
||||||
|
codeGenContext.gateData, codeGenContext.gateDataSize, codeStart))
|
||||||
|
{
|
||||||
|
CODEGEN_ASSERT(!"Failed to create entry function");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set the offset at the begining so that functions in new blocks will not overlay the locations
|
||||||
|
// specified by the unwind information of the entry function
|
||||||
|
unwind.setBeginOffset(build.getLabelOffset(entryLocations.prologueEnd));
|
||||||
|
|
||||||
|
codeGenContext.context.gateEntry = codeStart + build.getLabelOffset(entryLocations.start);
|
||||||
|
codeGenContext.context.gateExit = codeStart + build.getLabelOffset(entryLocations.epilogueStart);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
void assembleHelpers(X64::AssemblyBuilderX64& build, ModuleHelpers& helpers)
|
void assembleHelpers(X64::AssemblyBuilderX64& build, ModuleHelpers& helpers)
|
||||||
{
|
{
|
||||||
if (build.logText)
|
if (build.logText)
|
||||||
|
|
|
@ -6,6 +6,7 @@ namespace Luau
|
||||||
namespace CodeGen
|
namespace CodeGen
|
||||||
{
|
{
|
||||||
|
|
||||||
|
class BaseCodeGenContext;
|
||||||
struct NativeState;
|
struct NativeState;
|
||||||
struct ModuleHelpers;
|
struct ModuleHelpers;
|
||||||
|
|
||||||
|
@ -15,6 +16,7 @@ namespace X64
|
||||||
class AssemblyBuilderX64;
|
class AssemblyBuilderX64;
|
||||||
|
|
||||||
bool initHeaderFunctions(NativeState& data);
|
bool initHeaderFunctions(NativeState& data);
|
||||||
|
bool initHeaderFunctions(BaseCodeGenContext& codeGenContext);
|
||||||
void assembleHelpers(AssemblyBuilderX64& build, ModuleHelpers& helpers);
|
void assembleHelpers(AssemblyBuilderX64& build, ModuleHelpers& helpers);
|
||||||
|
|
||||||
} // namespace X64
|
} // namespace X64
|
||||||
|
|
|
@ -11,11 +11,10 @@
|
||||||
#include "lstate.h"
|
#include "lstate.h"
|
||||||
#include "lgc.h"
|
#include "lgc.h"
|
||||||
|
|
||||||
LUAU_FASTFLAGVARIABLE(LuauCodeGenVectorA64, false)
|
|
||||||
LUAU_FASTFLAGVARIABLE(LuauCodeGenOptVecA64, false)
|
LUAU_FASTFLAGVARIABLE(LuauCodeGenOptVecA64, false)
|
||||||
|
|
||||||
LUAU_FASTFLAG(LuauCodegenVectorTag2)
|
|
||||||
LUAU_FASTFLAG(LuauCodegenRemoveDeadStores4)
|
LUAU_FASTFLAG(LuauCodegenRemoveDeadStores4)
|
||||||
|
LUAU_FASTFLAG(LuauCodegenCheckTruthyFormB)
|
||||||
|
|
||||||
namespace Luau
|
namespace Luau
|
||||||
{
|
{
|
||||||
|
@ -731,148 +730,35 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||||
{
|
{
|
||||||
inst.regA64 = regs.allocReuse(KindA64::q, index, {inst.a, inst.b});
|
inst.regA64 = regs.allocReuse(KindA64::q, index, {inst.a, inst.b});
|
||||||
|
|
||||||
if (FFlag::LuauCodeGenVectorA64)
|
build.fadd(inst.regA64, regOp(inst.a), regOp(inst.b));
|
||||||
{
|
|
||||||
build.fadd(inst.regA64, regOp(inst.a), regOp(inst.b));
|
|
||||||
|
|
||||||
if (!FFlag::LuauCodegenVectorTag2)
|
|
||||||
{
|
|
||||||
RegisterA64 tempw = regs.allocTemp(KindA64::w);
|
|
||||||
build.mov(tempw, LUA_TVECTOR);
|
|
||||||
build.ins_4s(inst.regA64, tempw, 3);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
RegisterA64 tempa = regs.allocTemp(KindA64::s);
|
|
||||||
RegisterA64 tempb = regs.allocTemp(KindA64::s);
|
|
||||||
|
|
||||||
for (uint8_t i = 0; i < 3; i++)
|
|
||||||
{
|
|
||||||
build.dup_4s(tempa, regOp(inst.a), i);
|
|
||||||
build.dup_4s(tempb, regOp(inst.b), i);
|
|
||||||
build.fadd(tempa, tempa, tempb);
|
|
||||||
build.ins_4s(inst.regA64, i, castReg(KindA64::q, tempa), 0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case IrCmd::SUB_VEC:
|
case IrCmd::SUB_VEC:
|
||||||
{
|
{
|
||||||
inst.regA64 = regs.allocReuse(KindA64::q, index, {inst.a, inst.b});
|
inst.regA64 = regs.allocReuse(KindA64::q, index, {inst.a, inst.b});
|
||||||
|
|
||||||
if (FFlag::LuauCodeGenVectorA64)
|
build.fsub(inst.regA64, regOp(inst.a), regOp(inst.b));
|
||||||
{
|
|
||||||
build.fsub(inst.regA64, regOp(inst.a), regOp(inst.b));
|
|
||||||
|
|
||||||
if (!FFlag::LuauCodegenVectorTag2)
|
|
||||||
{
|
|
||||||
RegisterA64 tempw = regs.allocTemp(KindA64::w);
|
|
||||||
build.mov(tempw, LUA_TVECTOR);
|
|
||||||
build.ins_4s(inst.regA64, tempw, 3);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
RegisterA64 tempa = regs.allocTemp(KindA64::s);
|
|
||||||
RegisterA64 tempb = regs.allocTemp(KindA64::s);
|
|
||||||
|
|
||||||
for (uint8_t i = 0; i < 3; i++)
|
|
||||||
{
|
|
||||||
build.dup_4s(tempa, regOp(inst.a), i);
|
|
||||||
build.dup_4s(tempb, regOp(inst.b), i);
|
|
||||||
build.fsub(tempa, tempa, tempb);
|
|
||||||
build.ins_4s(inst.regA64, i, castReg(KindA64::q, tempa), 0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case IrCmd::MUL_VEC:
|
case IrCmd::MUL_VEC:
|
||||||
{
|
{
|
||||||
inst.regA64 = regs.allocReuse(KindA64::q, index, {inst.a, inst.b});
|
inst.regA64 = regs.allocReuse(KindA64::q, index, {inst.a, inst.b});
|
||||||
|
|
||||||
if (FFlag::LuauCodeGenVectorA64)
|
build.fmul(inst.regA64, regOp(inst.a), regOp(inst.b));
|
-        {
-            build.fmul(inst.regA64, regOp(inst.a), regOp(inst.b));
-
-            if (!FFlag::LuauCodegenVectorTag2)
-            {
-                RegisterA64 tempw = regs.allocTemp(KindA64::w);
-                build.mov(tempw, LUA_TVECTOR);
-                build.ins_4s(inst.regA64, tempw, 3);
-            }
-        }
-        else
-        {
-            RegisterA64 tempa = regs.allocTemp(KindA64::s);
-            RegisterA64 tempb = regs.allocTemp(KindA64::s);
-
-            for (uint8_t i = 0; i < 3; i++)
-            {
-                build.dup_4s(tempa, regOp(inst.a), i);
-                build.dup_4s(tempb, regOp(inst.b), i);
-                build.fmul(tempa, tempa, tempb);
-                build.ins_4s(inst.regA64, i, castReg(KindA64::q, tempa), 0);
-            }
-        }
         break;
     }
     case IrCmd::DIV_VEC:
     {
         inst.regA64 = regs.allocReuse(KindA64::q, index, {inst.a, inst.b});

-        if (FFlag::LuauCodeGenVectorA64)
-        {
-            build.fdiv(inst.regA64, regOp(inst.a), regOp(inst.b));
-
-            if (!FFlag::LuauCodegenVectorTag2)
-            {
-                RegisterA64 tempw = regs.allocTemp(KindA64::w);
-                build.mov(tempw, LUA_TVECTOR);
-                build.ins_4s(inst.regA64, tempw, 3);
-            }
-        }
-        else
-        {
-            RegisterA64 tempa = regs.allocTemp(KindA64::s);
-            RegisterA64 tempb = regs.allocTemp(KindA64::s);
-
-            for (uint8_t i = 0; i < 3; i++)
-            {
-                build.dup_4s(tempa, regOp(inst.a), i);
-                build.dup_4s(tempb, regOp(inst.b), i);
-                build.fdiv(tempa, tempa, tempb);
-                build.ins_4s(inst.regA64, i, castReg(KindA64::q, tempa), 0);
-            }
-        }
+        build.fdiv(inst.regA64, regOp(inst.a), regOp(inst.b));
         break;
     }
     case IrCmd::UNM_VEC:
     {
         inst.regA64 = regs.allocReuse(KindA64::q, index, {inst.a});

-        if (FFlag::LuauCodeGenVectorA64)
-        {
-            build.fneg(inst.regA64, regOp(inst.a));
-
-            if (!FFlag::LuauCodegenVectorTag2)
-            {
-                RegisterA64 tempw = regs.allocTemp(KindA64::w);
-                build.mov(tempw, LUA_TVECTOR);
-                build.ins_4s(inst.regA64, tempw, 3);
-            }
-        }
-        else
-        {
-            RegisterA64 tempa = regs.allocTemp(KindA64::s);
-
-            for (uint8_t i = 0; i < 3; i++)
-            {
-                build.dup_4s(tempa, regOp(inst.a), i);
-                build.fneg(tempa, tempa);
-                build.ins_4s(inst.regA64, i, castReg(KindA64::q, tempa), 0);
-            }
-        }
+        build.fneg(inst.regA64, regOp(inst.a));
         break;
     }
     case IrCmd::NOT_ANY:

@@ -1232,7 +1118,7 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
     {
         inst.regA64 = regs.allocReg(KindA64::q, index);

-        if (FFlag::LuauCodeGenOptVecA64 && FFlag::LuauCodegenVectorTag2 && inst.a.kind == IrOpKind::Constant)
+        if (FFlag::LuauCodeGenOptVecA64 && inst.a.kind == IrOpKind::Constant)
         {
             float value = float(doubleOp(inst.a));
             uint32_t asU32;

@@ -1256,16 +1142,9 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
         {
             RegisterA64 tempd = tempDouble(inst.a);
             RegisterA64 temps = castReg(KindA64::s, tempd);
-            RegisterA64 tempw = regs.allocTemp(KindA64::w);

             build.fcvt(temps, tempd);
             build.dup_4s(inst.regA64, castReg(KindA64::q, temps), 0);
-
-            if (!FFlag::LuauCodegenVectorTag2)
-            {
-                build.mov(tempw, LUA_TVECTOR);
-                build.ins_4s(inst.regA64, tempw, 3);
-            }
         }
         break;
     }

@@ -1568,7 +1447,15 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
         }

         // fail to fallback on 'false' boolean value (falsy)
-        build.cbz(regOp(inst.b), target);
+        if (!FFlag::LuauCodegenCheckTruthyFormB || inst.b.kind != IrOpKind::Constant)
+        {
+            build.cbz(regOp(inst.b), target);
+        }
+        else
+        {
+            if (intOp(inst.b) == 0)
+                build.b(target);
+        }

         if (inst.a.kind != IrOpKind::Constant)
             build.setLabel(skip);
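Aside (added for clarity, not part of the diff): the `IrCmd::CHECK_TRUTHY` hunk above follows a standard lowering pattern: when the boolean operand is a compile-time constant, no runtime test is emitted; a constant false becomes an unconditional branch to the fallback, and a constant true emits nothing. A minimal, self-contained sketch of that decision, using made-up types rather than Luau's actual IR:

```cpp
#include <cstdio>
#include <optional>

// Illustrative stand-in for an IR operand that may be a known boolean constant.
struct Operand
{
    std::optional<int> constant; // empty = only known at runtime; 0 = false
};

// Decide what to emit for "fall back unless the value is truthy".
void lowerCheckTruthy(const Operand& b)
{
    if (!b.constant)
    {
        std::puts("emit: test b; jump to fallback if falsy"); // runtime check
    }
    else if (*b.constant == 0)
    {
        std::puts("emit: jump to fallback"); // constant false always falls back
    }
    // constant true: the check disappears entirely
}

int main()
{
    lowerCheckTruthy({std::nullopt}); // runtime value
    lowerCheckTruthy({0});            // constant false
    lowerCheckTruthy({1});            // constant true
    return 0;
}
```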
@@ -15,9 +15,9 @@
 #include "lstate.h"
 #include "lgc.h"

-LUAU_FASTFLAG(LuauCodegenVectorTag2)
 LUAU_FASTFLAGVARIABLE(LuauCodegenVectorOptAnd, false)
 LUAU_FASTFLAGVARIABLE(LuauCodegenSmallerUnm, false)
+LUAU_FASTFLAGVARIABLE(LuauCodegenCheckTruthyFormB, false)

 namespace Luau
 {

@@ -631,9 +631,6 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
             build.vandps(tmp2.reg, regOp(inst.b), vectorAndMaskOp());
             build.vaddps(inst.regX64, tmp1.reg, tmp2.reg);
         }
-
-        if (!FFlag::LuauCodegenVectorTag2)
-            build.vorps(inst.regX64, inst.regX64, vectorOrMaskOp());
         break;
     }
     case IrCmd::SUB_VEC:

@@ -660,9 +657,6 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
             build.vandps(tmp2.reg, regOp(inst.b), vectorAndMaskOp());
             build.vsubps(inst.regX64, tmp1.reg, tmp2.reg);
         }
-
-        if (!FFlag::LuauCodegenVectorTag2)
-            build.vorps(inst.regX64, inst.regX64, vectorOrMaskOp());
         break;
     }
     case IrCmd::MUL_VEC:

@@ -689,9 +683,6 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
             build.vandps(tmp2.reg, regOp(inst.b), vectorAndMaskOp());
             build.vmulps(inst.regX64, tmp1.reg, tmp2.reg);
         }
-
-        if (!FFlag::LuauCodegenVectorTag2)
-            build.vorps(inst.regX64, inst.regX64, vectorOrMaskOp());
         break;
     }
     case IrCmd::DIV_VEC:

@@ -718,9 +709,6 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
             build.vandps(tmp2.reg, regOp(inst.b), vectorAndMaskOp());
             build.vdivps(inst.regX64, tmp1.reg, tmp2.reg);
         }
-
-        if (!FFlag::LuauCodegenVectorTag2)
-            build.vpinsrd(inst.regX64, inst.regX64, build.i32(LUA_TVECTOR), 3);
         break;
     }
     case IrCmd::UNM_VEC:

@@ -745,9 +733,6 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
                 build.vxorpd(inst.regX64, inst.regX64, build.f32x4(-0.0, -0.0, -0.0, -0.0));
             }
         }
-
-        if (!FFlag::LuauCodegenVectorTag2)
-            build.vpinsrd(inst.regX64, inst.regX64, build.i32(LUA_TVECTOR), 3);
         break;
     }
     case IrCmd::NOT_ANY:

@@ -1052,18 +1037,12 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
             static_assert(sizeof(asU32) == sizeof(value), "Expecting float to be 32-bit");
             memcpy(&asU32, &value, sizeof(value));

-            if (FFlag::LuauCodegenVectorTag2)
-                build.vmovaps(inst.regX64, build.u32x4(asU32, asU32, asU32, 0));
-            else
-                build.vmovaps(inst.regX64, build.u32x4(asU32, asU32, asU32, LUA_TVECTOR));
+            build.vmovaps(inst.regX64, build.u32x4(asU32, asU32, asU32, 0));
         }
         else
         {
             build.vcvtsd2ss(inst.regX64, inst.regX64, memRegDoubleOp(inst.a));
             build.vpshufps(inst.regX64, inst.regX64, inst.regX64, 0b00'00'00'00);
-
-            if (!FFlag::LuauCodegenVectorTag2)
-                build.vpinsrd(inst.regX64, inst.regX64, build.i32(LUA_TVECTOR), 3);
         }
         break;
     case IrCmd::TAG_VECTOR:

@@ -1306,8 +1285,16 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
         }

         // fail to fallback on 'false' boolean value (falsy)
-        build.cmp(memRegUintOp(inst.b), 0);
-        jumpOrAbortOnUndef(ConditionX64::Equal, inst.c, next);
+        if (!FFlag::LuauCodegenCheckTruthyFormB || inst.b.kind != IrOpKind::Constant)
+        {
+            build.cmp(memRegUintOp(inst.b), 0);
+            jumpOrAbortOnUndef(ConditionX64::Equal, inst.c, next);
+        }
+        else
+        {
+            if (intOp(inst.b) == 0)
+                jumpOrAbortOnUndef(inst.c, next);
+        }

         if (inst.a.kind != IrOpKind::Constant)
             build.setLabel(skip);

@@ -2306,7 +2293,7 @@ OperandX64 IrLoweringX64::bufferAddrOp(IrOp bufferOp, IrOp indexOp)

 RegisterX64 IrLoweringX64::vecOp(IrOp op, ScopedRegX64& tmp)
 {
-    if (FFlag::LuauCodegenVectorOptAnd && FFlag::LuauCodegenVectorTag2)
+    if (FFlag::LuauCodegenVectorOptAnd)
     {
         IrInst source = function.instOp(op);
         CODEGEN_ASSERT(source.cmd != IrCmd::SUBSTITUTE); // we don't process substitutions

@@ -2365,16 +2352,6 @@ OperandX64 IrLoweringX64::vectorAndMaskOp()
     return vectorAndMask;
 }

-OperandX64 IrLoweringX64::vectorOrMaskOp()
-{
-    CODEGEN_ASSERT(!FFlag::LuauCodegenVectorTag2);
-
-    if (vectorOrMask.base == noreg)
-        vectorOrMask = build.u32x4(0, 0, 0, LUA_TVECTOR);
-
-    return vectorOrMask;
-}
-
 } // namespace X64
 } // namespace CodeGen
 } // namespace Luau
@@ -63,7 +63,6 @@ struct IrLoweringX64
     Label& labelOp(IrOp op) const;

     OperandX64 vectorAndMaskOp();
-    OperandX64 vectorOrMaskOp();

     struct InterruptHandler
     {
@@ -12,7 +12,6 @@
 #include "lstate.h"
 #include "ltm.h"

-LUAU_FASTFLAGVARIABLE(LuauCodegenVectorTag2, false)
 LUAU_FASTFLAGVARIABLE(LuauCodegenLoadTVTag, false)

 namespace Luau

@@ -388,8 +387,7 @@ static void translateInstBinaryNumeric(IrBuilder& build, int ra, int rb, int rc,
             CODEGEN_ASSERT(!"Unknown TM op");
         }

-        if (FFlag::LuauCodegenVectorTag2)
-            result = build.inst(IrCmd::TAG_VECTOR, result);
+        result = build.inst(IrCmd::TAG_VECTOR, result);

         build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), result);
         return;

@@ -417,8 +415,7 @@ static void translateInstBinaryNumeric(IrBuilder& build, int ra, int rb, int rc,
             CODEGEN_ASSERT(!"Unknown TM op");
         }

-        if (FFlag::LuauCodegenVectorTag2)
-            result = build.inst(IrCmd::TAG_VECTOR, result);
+        result = build.inst(IrCmd::TAG_VECTOR, result);

         build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), result);
         return;

@@ -446,8 +443,7 @@ static void translateInstBinaryNumeric(IrBuilder& build, int ra, int rb, int rc,
             CODEGEN_ASSERT(!"Unknown TM op");
         }

-        if (FFlag::LuauCodegenVectorTag2)
-            result = build.inst(IrCmd::TAG_VECTOR, result);
+        result = build.inst(IrCmd::TAG_VECTOR, result);

         build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), result);
         return;

@@ -589,8 +585,7 @@ void translateInstMinus(IrBuilder& build, const Instruction* pc, int pcpos)

         IrOp vb = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(rb));
         IrOp va = build.inst(IrCmd::UNM_VEC, vb);
-        if (FFlag::LuauCodegenVectorTag2)
-            va = build.inst(IrCmd::TAG_VECTOR, va);
+        va = build.inst(IrCmd::TAG_VECTOR, va);

         build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), va);
         return;
     }
@@ -17,9 +17,7 @@ namespace CodeGen

 void NativeProtoExecDataDeleter::operator()(const uint32_t* instructionOffsets) const noexcept
 {
-    const NativeProtoExecDataHeader* header = &getNativeProtoExecDataHeader(instructionOffsets);
-    header->~NativeProtoExecDataHeader();
-    delete[] reinterpret_cast<const uint8_t*>(header);
+    destroyNativeProtoExecData(instructionOffsets);
 }

 [[nodiscard]] NativeProtoExecDataPtr createNativeProtoExecData(uint32_t bytecodeInstructionCount)

@@ -29,6 +27,13 @@ void NativeProtoExecDataDeleter::operator()(const uint32_t* instructionOffsets)
     return NativeProtoExecDataPtr{reinterpret_cast<uint32_t*>(bytes.release() + sizeof(NativeProtoExecDataHeader))};
 }

+void destroyNativeProtoExecData(const uint32_t* instructionOffsets) noexcept
+{
+    const NativeProtoExecDataHeader* header = &getNativeProtoExecDataHeader(instructionOffsets);
+    header->~NativeProtoExecDataHeader();
+    delete[] reinterpret_cast<const uint8_t*>(header);
+}
+
 [[nodiscard]] NativeProtoExecDataHeader& getNativeProtoExecDataHeader(uint32_t* instructionOffsets) noexcept
 {
     return *reinterpret_cast<NativeProtoExecDataHeader*>(reinterpret_cast<uint8_t*>(instructionOffsets) - sizeof(NativeProtoExecDataHeader));

@@ -36,7 +41,6 @@ void NativeProtoExecDataDeleter::operator()(const uint32_t* instructionOffsets)

 [[nodiscard]] const NativeProtoExecDataHeader& getNativeProtoExecDataHeader(const uint32_t* instructionOffsets) noexcept
 {
-
     return *reinterpret_cast<const NativeProtoExecDataHeader*>(
         reinterpret_cast<const uint8_t*>(instructionOffsets) - sizeof(NativeProtoExecDataHeader));
 }
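Aside (added for clarity, not part of the diff): the hunks above move the deleter body into a named `destroyNativeProtoExecData` helper, so code that only holds the raw offsets pointer can release it without going through the smart pointer. A rough standalone sketch of that deleter-delegates-to-a-free-function pattern, with made-up names (not the Luau API):

```cpp
#include <cstdint>
#include <memory>

// Hypothetical buffer: allocated as raw bytes, handed out through a typed pointer.
void destroyBuffer(const uint32_t* data) noexcept
{
    delete[] reinterpret_cast<const uint8_t*>(data);
}

// The unique_ptr deleter just forwards to the named destroy function.
struct BufferDeleter
{
    void operator()(const uint32_t* data) const noexcept
    {
        destroyBuffer(data);
    }
};

using BufferPtr = std::unique_ptr<uint32_t, BufferDeleter>;

int main()
{
    // Allocate as bytes, expose a typed pointer (mirrors the header-prefix trick).
    BufferPtr owned{reinterpret_cast<uint32_t*>(new uint8_t[64])};

    // A caller that ends up with only the raw pointer can still free it correctly.
    uint32_t* raw = owned.release();
    destroyBuffer(raw);
    return 0;
}
```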
@@ -112,5 +112,83 @@ void initFunctions(NativeState& data)
     data.context.executeSETLIST = executeSETLIST;
 }

+void initFunctions(NativeContext& context)
+{
+    static_assert(sizeof(context.luauF_table) == sizeof(luauF_table), "fastcall tables are not of the same length");
+    memcpy(context.luauF_table, luauF_table, sizeof(luauF_table));
+
+    context.luaV_lessthan = luaV_lessthan;
+    context.luaV_lessequal = luaV_lessequal;
+    context.luaV_equalval = luaV_equalval;
+    context.luaV_doarith = luaV_doarith;
+    context.luaV_dolen = luaV_dolen;
+    context.luaV_gettable = luaV_gettable;
+    context.luaV_settable = luaV_settable;
+    context.luaV_getimport = luaV_getimport;
+    context.luaV_concat = luaV_concat;
+
+    context.luaH_getn = luaH_getn;
+    context.luaH_new = luaH_new;
+    context.luaH_clone = luaH_clone;
+    context.luaH_resizearray = luaH_resizearray;
+    context.luaH_setnum = luaH_setnum;
+
+    context.luaC_barriertable = luaC_barriertable;
+    context.luaC_barrierf = luaC_barrierf;
+    context.luaC_barrierback = luaC_barrierback;
+    context.luaC_step = luaC_step;
+
+    context.luaF_close = luaF_close;
+    context.luaF_findupval = luaF_findupval;
+    context.luaF_newLclosure = luaF_newLclosure;
+
+    context.luaT_gettm = luaT_gettm;
+    context.luaT_objtypenamestr = luaT_objtypenamestr;
+
+    context.libm_exp = exp;
+    context.libm_pow = pow;
+    context.libm_fmod = fmod;
+    context.libm_log = log;
+    context.libm_log2 = log2;
+    context.libm_log10 = log10;
+    context.libm_ldexp = ldexp;
+    context.libm_round = round;
+    context.libm_frexp = frexp;
+    context.libm_modf = modf;
+
+    context.libm_asin = asin;
+    context.libm_sin = sin;
+    context.libm_sinh = sinh;
+    context.libm_acos = acos;
+    context.libm_cos = cos;
+    context.libm_cosh = cosh;
+    context.libm_atan = atan;
+    context.libm_atan2 = atan2;
+    context.libm_tan = tan;
+    context.libm_tanh = tanh;
+
+    context.forgLoopTableIter = forgLoopTableIter;
+    context.forgLoopNodeIter = forgLoopNodeIter;
+    context.forgLoopNonTableFallback = forgLoopNonTableFallback;
+    context.forgPrepXnextFallback = forgPrepXnextFallback;
+    context.callProlog = callProlog;
+    context.callEpilogC = callEpilogC;
+
+    context.callFallback = callFallback;
+
+    context.executeGETGLOBAL = executeGETGLOBAL;
+    context.executeSETGLOBAL = executeSETGLOBAL;
+    context.executeGETTABLEKS = executeGETTABLEKS;
+    context.executeSETTABLEKS = executeSETTABLEKS;
+
+    context.executeNAMECALL = executeNAMECALL;
+    context.executeFORGPREP = executeFORGPREP;
+    context.executeGETVARARGSMultRet = executeGETVARARGSMultRet;
+    context.executeGETVARARGSConst = executeGETVARARGSConst;
+    context.executeDUPCLOSURE = executeDUPCLOSURE;
+    context.executePREPVARARGS = executePREPVARARGS;
+    context.executeSETLIST = executeSETLIST;
+}
+
 } // namespace CodeGen
 } // namespace Luau
@@ -124,6 +124,7 @@ struct NativeState
 };

 void initFunctions(NativeState& data);
+void initFunctions(NativeContext& context);

 } // namespace CodeGen
 } // namespace Luau
@@ -17,10 +17,10 @@
 LUAU_FASTINTVARIABLE(LuauCodeGenMinLinearBlockPath, 3)
 LUAU_FASTINTVARIABLE(LuauCodeGenReuseSlotLimit, 64)
 LUAU_FASTFLAGVARIABLE(DebugLuauAbortingChecks, false)
-LUAU_FASTFLAG(LuauCodegenVectorTag2)
 LUAU_DYNAMIC_FASTFLAGVARIABLE(LuauCodeGenCoverForgprepEffect, false)
 LUAU_FASTFLAG(LuauCodegenRemoveDeadStores4)
 LUAU_FASTFLAG(LuauCodegenLoadTVTag)
+LUAU_FASTFLAGVARIABLE(LuauCodegenInferNumTag, false)

 namespace Luau
 {

@@ -717,17 +717,8 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
     {
         if (IrInst* arg = function.asInstOp(inst.b))
         {
-            if (FFlag::LuauCodegenVectorTag2)
-            {
-                if (arg->cmd == IrCmd::TAG_VECTOR)
-                    tag = LUA_TVECTOR;
-            }
-            else
-            {
-                if (arg->cmd == IrCmd::ADD_VEC || arg->cmd == IrCmd::SUB_VEC || arg->cmd == IrCmd::MUL_VEC || arg->cmd == IrCmd::DIV_VEC ||
-                    arg->cmd == IrCmd::UNM_VEC)
-                    tag = LUA_TVECTOR;
-            }
+            if (arg->cmd == IrCmd::TAG_VECTOR)
+                tag = LUA_TVECTOR;

             if (FFlag::LuauCodegenLoadTVTag && arg->cmd == IrCmd::LOAD_TVALUE && arg->c.kind != IrOpKind::None)
                 tag = function.tagOp(arg->c);

@@ -906,23 +897,58 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
     {
         uint8_t b = function.tagOp(inst.b);

-        if (uint8_t tag = state.tryGetTag(inst.a); tag != 0xff)
-        {
-            if (tag == b)
-            {
-                if (FFlag::DebugLuauAbortingChecks)
-                    replace(function, inst.c, build.undef());
-                else
-                    kill(function, inst);
-            }
-            else
-            {
-                replace(function, block, index, {IrCmd::JUMP, inst.c}); // Shows a conflict in assumptions on this path
-            }
-        }
-        else
-        {
-            state.updateTag(inst.a, b); // We can assume the tag value going forward
-        }
+        if (FFlag::LuauCodegenInferNumTag)
+        {
+            uint8_t tag = state.tryGetTag(inst.a);
+
+            if (tag == 0xff)
+            {
+                if (IrOp value = state.tryGetValue(inst.a); value.kind == IrOpKind::Constant)
+                {
+                    if (function.constOp(value).kind == IrConstKind::Double)
+                        tag = LUA_TNUMBER;
+                }
+            }
+
+            if (tag != 0xff)
+            {
+                if (tag == b)
+                {
+                    if (FFlag::DebugLuauAbortingChecks)
+                        replace(function, inst.c, build.undef());
+                    else
+                        kill(function, inst);
+                }
+                else
+                {
+                    replace(function, block, index, {IrCmd::JUMP, inst.c}); // Shows a conflict in assumptions on this path
+                }
+            }
+            else
+            {
+                state.updateTag(inst.a, b); // We can assume the tag value going forward
+            }
+        }
+        else
+        {
+            if (uint8_t tag = state.tryGetTag(inst.a); tag != 0xff)
+            {
+                if (tag == b)
+                {
+                    if (FFlag::DebugLuauAbortingChecks)
+                        replace(function, inst.c, build.undef());
+                    else
+                        kill(function, inst);
+                }
+                else
+                {
+                    replace(function, block, index, {IrCmd::JUMP, inst.c}); // Shows a conflict in assumptions on this path
+                }
+            }
+            else
+            {
+                state.updateTag(inst.a, b); // We can assume the tag value going forward
+            }
+        }
         break;
     }

@@ -1296,22 +1322,16 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
     case IrCmd::SUB_VEC:
     case IrCmd::MUL_VEC:
     case IrCmd::DIV_VEC:
-        if (FFlag::LuauCodegenVectorTag2)
-        {
-            if (IrInst* a = function.asInstOp(inst.a); a && a->cmd == IrCmd::TAG_VECTOR)
-                replace(function, inst.a, a->a);
-
-            if (IrInst* b = function.asInstOp(inst.b); b && b->cmd == IrCmd::TAG_VECTOR)
-                replace(function, inst.b, b->a);
-        }
+        if (IrInst* a = function.asInstOp(inst.a); a && a->cmd == IrCmd::TAG_VECTOR)
+            replace(function, inst.a, a->a);
+
+        if (IrInst* b = function.asInstOp(inst.b); b && b->cmd == IrCmd::TAG_VECTOR)
+            replace(function, inst.b, b->a);
         break;

     case IrCmd::UNM_VEC:
-        if (FFlag::LuauCodegenVectorTag2)
-        {
-            if (IrInst* a = function.asInstOp(inst.a); a && a->cmd == IrCmd::TAG_VECTOR)
-                replace(function, inst.a, a->a);
-        }
+        if (IrInst* a = function.asInstOp(inst.a); a && a->cmd == IrCmd::TAG_VECTOR)
+            replace(function, inst.a, a->a);
         break;

     case IrCmd::CHECK_NODE_NO_NEXT:
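Aside (added for clarity, not part of the diff): the `CHECK_TAG` constant-propagation hunk above, guarded by `LuauCodegenInferNumTag`, adds one inference step: when a register's tag is unknown but its value has already been proven to be a double constant, the tag must be the number tag, so the check can be resolved at compile time. A small self-contained sketch of that rule with placeholder tag values and state tracking (not Luau's real constants or data structures):

```cpp
#include <cassert>
#include <cstdint>
#include <optional>

constexpr uint8_t kUnknownTag = 0xff; // placeholder for "tag not tracked"
constexpr uint8_t kNumberTag = 3;     // placeholder for the number tag

// What a constant-propagation pass might know about one register.
struct SlotState
{
    uint8_t tag = kUnknownTag;
    std::optional<double> constantValue; // set when the value itself is a known double
};

// If the tag is unknown but the value is a known double constant, infer the number tag.
uint8_t effectiveTag(const SlotState& slot)
{
    if (slot.tag != kUnknownTag)
        return slot.tag;
    if (slot.constantValue)
        return kNumberTag;
    return kUnknownTag;
}

int main()
{
    SlotState unknown;
    SlotState knownDouble;
    knownDouble.constantValue = 2.0;

    assert(effectiveTag(unknown) == kUnknownTag);
    assert(effectiveTag(knownDouble) == kNumberTag);
    return 0;
}
```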
@@ -10,7 +10,6 @@
 #include "lobject.h"

 LUAU_FASTFLAGVARIABLE(LuauCodegenRemoveDeadStores4, false)
-LUAU_FASTFLAG(LuauCodegenVectorTag2)
 LUAU_FASTFLAG(LuauCodegenLoadTVTag)

 // TODO: optimization can be improved by knowing which registers are live in at each VM exit

@@ -340,17 +339,8 @@ static void markDeadStoresInInst(RemoveDeadStoreState& state, IrBuilder& build,
         // TODO (CLI-101027): similar code is used in constant propagation optimization and should be shared in utilities
         if (IrInst* arg = function.asInstOp(inst.b))
         {
-            if (FFlag::LuauCodegenVectorTag2)
-            {
-                if (arg->cmd == IrCmd::TAG_VECTOR)
-                    regInfo.maybeGco = false;
-            }
-            else
-            {
-                if (arg->cmd == IrCmd::ADD_VEC || arg->cmd == IrCmd::SUB_VEC || arg->cmd == IrCmd::MUL_VEC || arg->cmd == IrCmd::DIV_VEC ||
-                    arg->cmd == IrCmd::UNM_VEC)
-                    regInfo.maybeGco = false;
-            }
+            if (arg->cmd == IrCmd::TAG_VECTOR)
+                regInfo.maybeGco = false;

             if (FFlag::LuauCodegenLoadTVTag && arg->cmd == IrCmd::LOAD_TVALUE && arg->c.kind != IrOpKind::None)
                 regInfo.maybeGco = isGCO(function.tagOp(arg->c));
@@ -5,6 +5,8 @@

 #include "lapi.h"

+LUAU_FASTFLAG(LuauCodegenDetailedCompilationResult)
+
 int luau_codegen_supported()
 {
     return Luau::CodeGen::isSupported();

@@ -17,5 +19,8 @@ void luau_codegen_create(lua_State* L)

 void luau_codegen_compile(lua_State* L, int idx)
 {
-    Luau::CodeGen::compile(L, idx);
+    if (FFlag::LuauCodegenDetailedCompilationResult)
+        Luau::CodeGen::compile(L, idx);
+    else
+        Luau::CodeGen::compile_DEPRECATED(L, idx);
 }
@@ -77,7 +77,7 @@ struct FValue
         list = this;
     }

-    operator T() const
+    LUAU_FORCEINLINE operator T() const
     {
         return value;
     }
@@ -107,6 +107,7 @@ target_sources(Luau.CodeGen PRIVATE
     CodeGen/src/CodeBlockUnwind.cpp
     CodeGen/src/CodeGen.cpp
     CodeGen/src/CodeGenAssembly.cpp
+    CodeGen/src/CodeGenContext.cpp
     CodeGen/src/CodeGenUtils.cpp
     CodeGen/src/CodeGenA64.cpp
     CodeGen/src/CodeGenX64.cpp

@@ -139,6 +140,7 @@ target_sources(Luau.CodeGen PRIVATE

     CodeGen/src/BitUtils.h
     CodeGen/src/ByteUtils.h
+    CodeGen/src/CodeGenContext.h
     CodeGen/src/CodeGenLower.h
     CodeGen/src/CodeGenUtils.h
     CodeGen/src/CodeGenA64.h

@@ -356,13 +358,19 @@ target_sources(isocline PRIVATE
     extern/isocline/src/isocline.c
 )

-# Common sources shared between all CLI apps
-target_sources(Luau.CLI.lib PRIVATE
-    CLI/FileUtils.cpp
-    CLI/Flags.cpp
-    CLI/Flags.h
-    CLI/FileUtils.h
-)
+if (TARGET Luau.Repl.CLI OR TARGET Luau.Analyze.CLI OR
+    TARGET Luau.Ast.CLI OR TARGET Luau.CLI.Test OR
+    TARGET Luau.Reduce.CLI OR TARGET Luau.Compile.CLI OR
+    TARGET Luau.Bytecode.CLI)
+    # Common sources shared between all CLI apps.
+    target_sources(Luau.CLI.lib PRIVATE
+        CLI/FileUtils.cpp
+        CLI/Flags.cpp
+        CLI/Flags.h
+        CLI/FileUtils.h
+    )
+endif()

 if(TARGET Luau.Repl.CLI)
     # Luau.Repl.CLI Sources
@@ -186,8 +186,16 @@ int lua_getinfo(lua_State* L, int level, const char* what, lua_Debug* ar)
     CallInfo* ci = NULL;
     if (level < 0)
     {
-        const TValue* func = luaA_toobject(L, level);
-        api_check(L, ttisfunction(func));
+        // element has to be within stack
+        if (-level > L->top - L->base)
+            return 0;
+
+        StkId func = L->top + level;
+
+        // and it has to be a function
+        if (!ttisfunction(func))
+            return 0;
+
         f = clvalue(func);
     }
     else if (unsigned(level) < unsigned(L->ci - L->base_ci))
@@ -35,6 +35,9 @@ LUAU_FASTINT(CodegenHeuristicsInstructionLimit)
 LUAU_FASTFLAG(LuauLoadExceptionSafe)
 LUAU_DYNAMIC_FASTFLAG(LuauDebugInfoDupArgLeftovers)
 LUAU_FASTFLAG(LuauCompileRepeatUntilSkippedLocals)
+LUAU_FASTFLAG(LuauCodegenInferNumTag)
+LUAU_FASTFLAG(LuauCodegenDetailedCompilationResult)
+LUAU_FASTFLAG(LuauCodegenCheckTruthyFormB)

 static lua_CompileOptions defaultOptions()
 {

@@ -238,7 +241,12 @@ static StateRef runConformance(const char* name, void (*setup)(lua_State* L) = n
     free(bytecode);

     if (result == 0 && codegen && !skipCodegen && luau_codegen_supported())
-        Luau::CodeGen::compile(L, -1, Luau::CodeGen::CodeGen_ColdFunctions);
+    {
+        if (FFlag::LuauCodegenDetailedCompilationResult)
+            Luau::CodeGen::compile(L, -1, Luau::CodeGen::CodeGen_ColdFunctions);
+        else
+            Luau::CodeGen::compile_DEPRECATED(L, -1, Luau::CodeGen::CodeGen_ColdFunctions);
+    }

     int status = (result == 0) ? lua_resume(L, nullptr, 0) : LUA_ERRSYNTAX;

@@ -1825,6 +1833,18 @@ TEST_CASE("LightuserdataApi")
     globalState.reset();
 }

+TEST_CASE("DebugApi")
+{
+    StateRef globalState(luaL_newstate(), lua_close);
+    lua_State* L = globalState.get();
+
+    lua_pushnumber(L, 10);
+
+    lua_Debug ar;
+    CHECK(lua_getinfo(L, -1, "f", &ar) == 0); // number is not a function
+    CHECK(lua_getinfo(L, -10, "f", &ar) == 0); // not on stack
+}
+
 TEST_CASE("Iter")
 {
     runConformance("iter.lua");

@@ -2051,6 +2071,9 @@ TEST_CASE("SafeEnv")

 TEST_CASE("Native")
 {
+    ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenInferNumTag, true};
+    ScopedFastFlag luauCodegenCheckTruthyFormB{FFlag::LuauCodegenCheckTruthyFormB, true};
+
     // This tests requires code to run natively, otherwise all 'is_native' checks will fail
     if (!codegen || !luau_codegen_supported())
         return;

@@ -2108,6 +2131,8 @@ TEST_CASE("NativeTypeAnnotations")

 TEST_CASE("HugeFunction")
 {
+    ScopedFastFlag luauCodegenDetailedCompilationResult{FFlag::LuauCodegenDetailedCompilationResult, true};
+
     std::string source = makeHugeFunctionSource();

     StateRef globalState(luaL_newstate(), lua_close);

@@ -2214,6 +2239,7 @@ TEST_CASE("IrInstructionLimit")
         return;

     ScopedFastInt codegenHeuristicsInstructionLimit{FInt::CodegenHeuristicsInstructionLimit, 50'000};
+    ScopedFastFlag luauCodegenDetailedCompilationResult{FFlag::LuauCodegenDetailedCompilationResult, true};

     std::string source;

@@ -2254,10 +2280,18 @@ TEST_CASE("IrInstructionLimit")
     REQUIRE(result == 0);

     Luau::CodeGen::CompilationStats nativeStats = {};
-    Luau::CodeGen::CodeGenCompilationResult nativeResult = Luau::CodeGen::compile(L, -1, Luau::CodeGen::CodeGen_ColdFunctions, &nativeStats);
+    Luau::CodeGen::CompilationResult nativeResult = Luau::CodeGen::compile(L, -1, Luau::CodeGen::CodeGen_ColdFunctions, &nativeStats);

     // Limit is not hit immediately, so with some functions compiled it should be a success
-    CHECK(nativeResult == Luau::CodeGen::CodeGenCompilationResult::CodeGenOverflowInstructionLimit);
+    CHECK(nativeResult.result == Luau::CodeGen::CodeGenCompilationResult::Success);
+
+    // But it has some failed functions
+    CHECK(nativeResult.hasErrors());
+    REQUIRE(!nativeResult.protoFailures.empty());
+
+    CHECK(nativeResult.protoFailures.front().result == Luau::CodeGen::CodeGenCompilationResult::CodeGenOverflowInstructionLimit);
+    CHECK(nativeResult.protoFailures.front().line != -1);
+    CHECK(nativeResult.protoFailures.front().debugname != "");

     // We should be able to compile at least one of our functions
     CHECK(nativeStats.functionsCompiled > 0);
@@ -631,4 +631,31 @@ print(t.x)
     CHECK(phi->operands.at(1) == x2);
 }

+TEST_CASE_FIXTURE(DataFlowGraphFixture, "insert_trivial_phi_nodes_inside_of_phi_nodes")
+{
+    dfg(R"(
+        local t = {}
+
+        local function f(k: string)
+            if t[k] ~= nil then
+                return
+            end
+
+            t[k] = 5
+        end
+    )");
+
+    DefId t1 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]); // local t = {}
+    DefId t2 = getDef<AstExprLocal, 1>(); // t[k] ~= nil
+    DefId t3 = getDef<AstExprLocal, 3>(); // t[k] = 5
+
+    CHECK(t1 != t2);
+    CHECK(t2 == t3);
+
+    const Phi* t2phi = get<Phi>(t2);
+    REQUIRE(t2phi);
+    CHECK(t2phi->operands.size() == 1);
+    CHECK(t2phi->operands.at(0) == t1);
+}
+
 TEST_SUITE_END();
@@ -12,9 +12,9 @@

 #include <limits.h>

-LUAU_FASTFLAG(LuauCodegenVectorTag2)
 LUAU_FASTFLAG(LuauCodegenRemoveDeadStores4)
 LUAU_FASTFLAG(DebugLuauAbortingChecks)
+LUAU_FASTFLAG(LuauCodegenInferNumTag)

 using namespace Luau::CodeGen;

@@ -2501,8 +2501,6 @@ bb_fallback_1:

 TEST_CASE_FIXTURE(IrBuilderFixture, "TagVectorSkipErrorFix")
 {
-    ScopedFastFlag luauCodegenVectorTag2{FFlag::LuauCodegenVectorTag2, true};
-
     IrOp block = build.block(IrBlockKind::Internal);

     build.beginBlock(block);

@@ -2631,6 +2629,30 @@ bb_0:
 )");
 }

+TEST_CASE_FIXTURE(IrBuilderFixture, "InferNumberTagFromLimitedContext")
+{
+    ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenInferNumTag, true};
+
+    IrOp entry = build.block(IrBlockKind::Internal);
+
+    build.beginBlock(entry);
+    build.inst(IrCmd::STORE_DOUBLE, build.vmReg(0), build.constDouble(2.0));
+    build.inst(IrCmd::CHECK_TAG, build.vmReg(0), build.constTag(ttable), build.vmExit(1));
+    build.inst(IrCmd::STORE_TVALUE, build.vmReg(1), build.inst(IrCmd::LOAD_TVALUE, build.vmReg(0)));
+    build.inst(IrCmd::RETURN, build.vmReg(1), build.constInt(1));
+
+    updateUseCounts(build.function);
+    computeCfgInfo(build.function);
+    constPropInBlockChains(build, true);
+
+    CHECK("\n" + toString(build.function, IncludeUseInfo::No) == R"(
+bb_0:
+   STORE_DOUBLE R0, 2
+   JUMP exit(1)
+
+)");
+}
+
 TEST_SUITE_END();

 TEST_SUITE_BEGIN("Analysis");
@@ -12,7 +12,6 @@

 #include <memory>

-LUAU_FASTFLAG(LuauCodegenVectorTag2)
 LUAU_FASTFLAG(LuauCodegenRemoveDeadStores4)
 LUAU_FASTFLAG(LuauCodegenLoadTVTag)

@@ -65,8 +64,6 @@ TEST_SUITE_BEGIN("IrLowering");

 TEST_CASE("VectorReciprocal")
 {
-    ScopedFastFlag luauCodegenVectorTag2{FFlag::LuauCodegenVectorTag2, true};
-
     CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function vecrcp(a: vector)
     return 1 / a

@@ -121,8 +118,6 @@ bb_bytecode_1:

 TEST_CASE("VectorAdd")
 {
-    ScopedFastFlag luauCodegenVectorTag2{FFlag::LuauCodegenVectorTag2, true};
-
     CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function vec3add(a: vector, b: vector)
     return a + b

@@ -149,8 +144,6 @@ bb_bytecode_1:

 TEST_CASE("VectorMinus")
 {
-    ScopedFastFlag luauCodegenVectorTag2{FFlag::LuauCodegenVectorTag2, true};
-
     CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function vec3minus(a: vector)
     return -a

@@ -175,7 +168,6 @@ bb_bytecode_1:

 TEST_CASE("VectorSubMulDiv")
 {
-    ScopedFastFlag luauCodegenVectorTag2{FFlag::LuauCodegenVectorTag2, true};
     ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores4, true};

     CHECK_EQ("\n" + getCodegenAssembly(R"(

@@ -210,7 +202,6 @@ bb_bytecode_1:

 TEST_CASE("VectorSubMulDiv2")
 {
-    ScopedFastFlag luauCodegenVectorTag2{FFlag::LuauCodegenVectorTag2, true};
     ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores4, true};

     CHECK_EQ("\n" + getCodegenAssembly(R"(

@@ -241,7 +232,6 @@ bb_bytecode_1:

 TEST_CASE("VectorMulDivMixed")
 {
-    ScopedFastFlag luauCodegenVectorTag2{FFlag::LuauCodegenVectorTag2, true};
     ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores4, true};

     CHECK_EQ("\n" + getCodegenAssembly(R"(

@@ -405,7 +395,6 @@ bb_bytecode_0:
 TEST_CASE("VectorConstantTag")
 {
     ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores4, true};
-    ScopedFastFlag luauCodegenVectorTag2{FFlag::LuauCodegenVectorTag2, true};
     ScopedFastFlag luauCodegenLoadTVTag{FFlag::LuauCodegenLoadTVTag, true};

     CHECK_EQ("\n" + getCodegenAssembly(R"(
@@ -450,6 +450,26 @@ TEST_CASE_FIXTURE(SubtypeFixture, "basic_typefamily_with_generics")
     CHECK(result.isSubtype);
 }

+TEST_CASE_FIXTURE(SubtypeFixture, "variadic_subpath_in_pack")
+{
+    TypePackId subTArgs = arena.addTypePack(TypePack{{builtinTypes->stringType, builtinTypes->stringType}, builtinTypes->anyTypePack});
+    TypePackId superTArgs = arena.addTypePack(TypePack{{builtinTypes->numberType}, builtinTypes->anyTypePack});
+    // (string, string, ...any) -> number
+    TypeId functionSub = arena.addType(FunctionType{subTArgs, arena.addTypePack({builtinTypes->numberType})});
+    // (number, ...any) -> string
+    TypeId functionSuper = arena.addType(FunctionType{superTArgs, arena.addTypePack({builtinTypes->stringType})});
+
+    SubtypingResult result = isSubtype(functionSub, functionSuper);
+    CHECK(result.reasoning == std::vector{SubtypingReasoning{TypePath::PathBuilder().rets().index(0).build(),
+                                              TypePath::PathBuilder().rets().index(0).build(), SubtypingVariance::Covariant},
+        SubtypingReasoning{TypePath::PathBuilder().args().index(0).build(), TypePath::PathBuilder().args().index(0).build(),
+            SubtypingVariance::Contravariant},
+        SubtypingReasoning{TypePath::PathBuilder().args().index(1).build(),
+            TypePath::PathBuilder().args().tail().variadic().build(), SubtypingVariance::Contravariant}});
+    CHECK(!result.isSubtype);
+}
+
 TEST_CASE_FIXTURE(SubtypeFixture, "any <!: unknown")
 {
     CHECK_IS_NOT_SUBTYPE(builtinTypes->anyType, builtinTypes->unknownType);
@@ -14,6 +14,7 @@ LUAU_FASTFLAG(LuauRecursiveTypeParameterRestriction);
 LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution);
 LUAU_FASTFLAG(LuauCheckedFunctionSyntax);
 LUAU_FASTFLAG(DebugLuauSharedSelf);
+LUAU_FASTFLAG(LuauStringifyCyclesRootedAtPacks);

 TEST_SUITE_BEGIN("ToString");

@@ -878,7 +879,11 @@ TEST_CASE_FIXTURE(Fixture, "toStringNamedFunction_include_self_param")

     TypeId parentTy = requireType("foo");
     auto ttv = get<TableType>(follow(parentTy));
-    auto ftv = get<FunctionType>(follow(ttv->props.at("method").type()));
+    REQUIRE(ttv);
+
+    TypeId methodTy = ttv->props.at("method").type();
+    auto ftv = get<FunctionType>(follow(methodTy));
+    REQUIRE_MESSAGE(ftv, methodTy);

     if (FFlag::DebugLuauDeferredConstraintResolution)
         CHECK_EQ("foo:method(self: unknown, arg: string): ()", toStringNamedFunction("foo:method", *ftv));

@@ -998,4 +1003,34 @@ TEST_CASE_FIXTURE(Fixture, "read_only_properties")
     CHECK("{ read x: string }" == toString(requireTypeAlias("B"), {true}));
 }

+TEST_CASE_FIXTURE(Fixture, "cycle_rooted_in_a_pack")
+{
+    ScopedFastFlag sff{FFlag::LuauStringifyCyclesRootedAtPacks, true};
+
+    TypeArena arena;
+
+    TypePackId thePack = arena.addTypePack({builtinTypes->numberType, builtinTypes->numberType});
+    TypePack* packPtr = getMutable<TypePack>(thePack);
+    REQUIRE(packPtr);
+
+    const TableType::Props theProps = {
+        {"BaseField", Property::readonly(builtinTypes->unknownType)},
+        {"BaseMethod", Property::readonly(arena.addType(
+            FunctionType{
+                thePack,
+                arena.addTypePack({})
+            }
+        ))}
+    };
+
+    TypeId theTable = arena.addType(TableType{theProps, {}, TypeLevel{}, TableState::Sealed});
+
+    packPtr->head[0] = theTable;
+
+    if (FFlag::DebugLuauDeferredConstraintResolution)
+        CHECK("tp1 where tp1 = { read BaseField: unknown, read BaseMethod: (tp1) -> () }, number" == toString(thePack));
+    else
+        CHECK("tp1 where tp1 = {| BaseField: unknown, BaseMethod: (tp1) -> () |}, number" == toString(thePack));
+}
+
 TEST_SUITE_END();
@@ -552,6 +552,21 @@ TEST_CASE_FIXTURE(ClassFixture, "keyof_type_family_common_subset_if_union_of_dif
     LUAU_REQUIRE_NO_ERRORS(result);
 }

+TEST_CASE_FIXTURE(ClassFixture, "binary_type_family_works_with_default_argument")
+{
+    if (!FFlag::DebugLuauDeferredConstraintResolution)
+        return;
+
+    CheckResult result = check(R"(
+        type result = mul<number>
+
+        local function thunk(): result return 5 * 4 end
+    )");
+
+    LUAU_REQUIRE_NO_ERRORS(result);
+    CHECK("() -> number" == toString(requireType("thunk")));
+}
+
 TEST_CASE_FIXTURE(ClassFixture, "vector2_multiply_is_overloaded")
 {
     if (!FFlag::DebugLuauDeferredConstraintResolution)
@@ -687,7 +687,7 @@ TEST_CASE_FIXTURE(Fixture, "higher_order_function_3")
     REQUIRE_EQ(1, argVec.size());

     const TableType* argType = get<TableType>(follow(argVec[0]));
-    REQUIRE(argType != nullptr);
+    REQUIRE_MESSAGE(argType != nullptr, argVec[0]);

     CHECK(bool(argType->indexer));
 }

@@ -2422,4 +2422,26 @@ TEST_CASE_FIXTURE(Fixture, "pass_table_literal_to_function_expecting_optional_pr
     LUAU_REQUIRE_NO_ERRORS(result);
 }

+TEST_CASE_FIXTURE(Fixture, "dont_infer_overloaded_functions")
+{
+    CheckResult result = check(R"(
+        function getR6Attachments(model)
+            model:FindFirstChild("Right Leg")
+            model:FindFirstChild("Left Leg")
+            model:FindFirstChild("Torso")
+            model:FindFirstChild("Torso")
+            model:FindFirstChild("Head")
+            model:FindFirstChild("Left Arm")
+            model:FindFirstChild("Right Arm")
+        end
+    )");
+
+    LUAU_REQUIRE_NO_ERRORS(result);
+
+    if (FFlag::DebugLuauDeferredConstraintResolution)
+        CHECK("<a...>(t1) -> () where t1 = { read FindFirstChild: (t1, string) -> (a...) }" == toString(requireType("getR6Attachments")));
+    else
+        CHECK("<a...>(t1) -> () where t1 = {+ FindFirstChild: (t1, string) -> (a...) +}" == toString(requireType("getR6Attachments")));
+}
+
 TEST_SUITE_END();
@@ -402,15 +402,7 @@ TEST_CASE_FIXTURE(Fixture, "calling_self_generic_methods")
         end
     )");

-    if (FFlag::DebugLuauDeferredConstraintResolution)
-    {
-        LUAU_REQUIRE_NO_ERRORS(result);
-
-        CHECK_EQ("{ f: (t1) -> (), id: <a>(unknown, a) -> a } where t1 = { read id: ((t1, number) -> number) & ((t1, string) -> string) }",
-            toString(requireType("x"), {true}));
-    }
-    else
-        LUAU_REQUIRE_ERRORS(result);
+    LUAU_REQUIRE_ERRORS(result);
 }

 TEST_CASE_FIXTURE(Fixture, "infer_generic_property")
@@ -1054,4 +1054,20 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "iterate_array_of_singletons")
     LUAU_REQUIRE_ERRORS(result);
 }

+TEST_CASE_FIXTURE(BuiltinsFixture, "iter_mm_results_are_lvalue")
+{
+    CheckResult result = check(R"(
+        local foo = setmetatable({}, {
+            __iter = function()
+                return pairs({1, 2, 3})
+            end,
+        })
+
+        for k, v in foo do
+        end
+    )");
+
+    LUAU_REQUIRE_NO_ERRORS(result);
+}
+
 TEST_SUITE_END();
@@ -2150,5 +2150,23 @@ TEST_CASE_FIXTURE(RefinementClassFixture, "mutate_prop_of_some_refined_symbol_2"
     LUAU_REQUIRE_NO_ERRORS(result);
 }

+TEST_CASE_FIXTURE(BuiltinsFixture, "ensure_t_after_return_references_all_reachable_points")
+{
+    CheckResult result = check(R"(
+        local t = {}
+
+        local function f(k: string)
+            if t[k] ~= nil then
+                return
+            end
+
+            t[k] = 5
+        end
+    )");
+
+    LUAU_REQUIRE_NO_ERRORS(result);
+
+    CHECK_EQ("{ [string]: number }", toString(requireTypeAtPosition({8, 12}), {true}));
+}
+
 TEST_SUITE_END();
@@ -40,7 +40,6 @@ function FadeValue:destroy()
 self.finalCallback = nil
 end
 )");
-
 }

 TEST_CASE_FIXTURE(Fixture, "basic")

@@ -4247,6 +4246,43 @@ TEST_CASE_FIXTURE(Fixture, "refined_thing_can_be_an_array")
     CHECK("<a>({a}, a) -> a" == toString(requireType("foo")));
 }

+TEST_CASE_FIXTURE(Fixture, "parameter_was_set_an_indexer_and_bounded_by_string")
+{
+    if (!FFlag::DebugLuauDeferredConstraintResolution)
+        return;
+
+    CheckResult result = check(R"(
+        function f(t)
+            local s: string = t
+            t[5] = 7
+        end
+    )");
+
+    LUAU_REQUIRE_ERROR_COUNT(3, result);
+
+    CHECK_EQ("Parameter 't' has been reduced to never. This function is not callable with any possible value.", toString(result.errors[0]));
+    CHECK_EQ("Parameter 't' is required to be a subtype of 'string' here.", toString(result.errors[1]));
+    CHECK_EQ("Parameter 't' is required to be a subtype of '{number}' here.", toString(result.errors[2]));
+}
+
+TEST_CASE_FIXTURE(Fixture, "parameter_was_set_an_indexer_and_bounded_by_another_parameter")
+{
+    if (!FFlag::DebugLuauDeferredConstraintResolution)
+        return;
+
+    CheckResult result = check(R"(
+        function f(t1, t2)
+            t1[5] = 7 -- 't1 <: {number}
+            t2 = t1 -- 't1 <: 't2
+            t1[5] = 7 -- 't1 <: {number}
+        end
+    )");
+
+    LUAU_REQUIRE_NO_ERRORS(result);
+
+    CHECK_EQ("({number}, unknown) -> ()", toString(requireType("f")));
+}
+
 TEST_CASE_FIXTURE(Fixture, "mymovie_read_write_tables_bug")
 {
     CheckResult result = check(R"(

@@ -4286,6 +4322,4 @@ TEST_CASE_FIXTURE(Fixture, "mymovie_read_write_tables_bug_2")
     LUAU_REQUIRE_ERRORS(result);
 }

-
-
 TEST_SUITE_END();
@@ -451,5 +451,4 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "typestates_preserve_error_suppression_proper
     CHECK("*error-type* | string" == toString(requireTypeAtPosition({3, 16}), {true}));
 }

-
 TEST_SUITE_END();
@@ -173,6 +173,41 @@ end
 assert(pcall(fuzzfail18) == true)
 assert(fuzzfail18() == 0)

+local function fuzzfail19()
+  local _ = 2
+  _ += _
+  _ = _,_ >= _,{_ >= _,_ >= _,_(),}
+
+  local _ = 2
+  do
+    _ = assert({n0=_,_,n0=_,}),{_={_[_()],},_,}
+  end
+end
+
+assert(pcall(fuzzfail19) == false)
+
+local function fuzzfail20()
+  assert(true)
+  assert(false,(_),true)
+  _ = nil
+end
+
+assert(pcall(fuzzfail20) == false)
+
+local function fuzzfail21(...)
+  local _ = assert,_
+  if _ then else return _ / _ end
+  _(_)
+  _(_,_)
+  assert(...,_)
+  _((not _),_)
+  _(true,_ / _)
+  _(_,_())
+  return _
+end
+
+assert(pcall(fuzzfail21) == false)
+
 local function arraySizeInv1()
   local t = {1, 2, nil, nil, nil, nil, nil, nil, nil, true}
@ -39,9 +39,6 @@ DefinitionTests.class_definition_overload_metamethods
|
||||||
DefinitionTests.class_definition_string_props
|
DefinitionTests.class_definition_string_props
|
||||||
DefinitionTests.declaring_generic_functions
|
DefinitionTests.declaring_generic_functions
|
||||||
DefinitionTests.definition_file_classes
|
DefinitionTests.definition_file_classes
|
||||||
Differ.equal_generictp_cyclic
|
|
||||||
Differ.generictp_normal
|
|
||||||
Differ.generictp_normal_2
|
|
||||||
Differ.metatable_metamissing_left
|
Differ.metatable_metamissing_left
|
||||||
Differ.metatable_metamissing_right
|
Differ.metatable_metamissing_right
|
||||||
Differ.metatable_metanormal
|
Differ.metatable_metanormal
|
||||||
|
@ -79,7 +76,6 @@ GenericsTests.infer_generic_function_function_argument_2
|
||||||
GenericsTests.infer_generic_function_function_argument_3
|
GenericsTests.infer_generic_function_function_argument_3
|
||||||
GenericsTests.infer_generic_function_function_argument_overloaded
|
GenericsTests.infer_generic_function_function_argument_overloaded
|
||||||
GenericsTests.instantiated_function_argument_names
|
GenericsTests.instantiated_function_argument_names
|
||||||
GenericsTests.mutable_state_polymorphism
|
|
||||||
GenericsTests.no_stack_overflow_from_quantifying
|
GenericsTests.no_stack_overflow_from_quantifying
|
||||||
GenericsTests.properties_can_be_instantiated_polytypes
|
GenericsTests.properties_can_be_instantiated_polytypes
|
||||||
GenericsTests.quantify_functions_even_if_they_have_an_explicit_generic
|
GenericsTests.quantify_functions_even_if_they_have_an_explicit_generic
|
||||||
|
@@ -115,7 +111,6 @@ IntersectionTypes.union_saturate_overloaded_functions
 Linter.FormatStringTyped
 Linter.TableOperationsIndexer
 ModuleTests.clone_self_property
-Negations.cofinite_strings_can_be_compared_for_equality
 Negations.negated_string_is_a_subtype_of_string
 NonstrictModeTests.inconsistent_module_return_types_are_ok
 NonstrictModeTests.infer_nullary_function
@@ -202,6 +197,7 @@ TableTests.explicitly_typed_table_with_indexer
 TableTests.generalize_table_argument
 TableTests.generic_table_instantiation_potential_regression
 TableTests.indexer_mismatch
+TableTests.indexer_on_sealed_table_must_unify_with_free_table
 TableTests.indexers_get_quantified_too
 TableTests.infer_indexer_from_array_like_table
 TableTests.infer_indexer_from_its_variable_type_and_unifiable
@@ -218,12 +214,15 @@ TableTests.meta_add_inferred
 TableTests.metatable_mismatch_should_fail
 TableTests.missing_metatable_for_sealed_tables_do_not_get_inferred
 TableTests.mixed_tables_with_implicit_numbered_keys
+TableTests.nil_assign_doesnt_hit_indexer
 TableTests.ok_to_provide_a_subtype_during_construction
 TableTests.ok_to_set_nil_even_on_non_lvalue_base_expr
 TableTests.okay_to_add_property_to_unsealed_tables_by_assignment
 TableTests.okay_to_add_property_to_unsealed_tables_by_function_call
 TableTests.only_ascribe_synthetic_names_at_module_scope
 TableTests.open_table_unification_2
+TableTests.parameter_was_set_an_indexer_and_bounded_by_another_parameter
+TableTests.parameter_was_set_an_indexer_and_bounded_by_string
 TableTests.pass_a_union_of_tables_to_a_function_that_requires_a_table
 TableTests.pass_a_union_of_tables_to_a_function_that_requires_a_table_2
 TableTests.persistent_sealed_table_is_immutable
@@ -256,7 +255,6 @@ TableTests.unification_of_unions_in_a_self_referential_type
 TableTests.used_colon_instead_of_dot
 TableTests.used_dot_instead_of_colon
 TableTests.when_augmenting_an_unsealed_table_with_an_indexer_apply_the_correct_scope_to_the_indexer_type
-TableTests.wrong_assign_does_hit_indexer
 ToDot.function
 ToString.exhaustive_toString_of_cyclic_table
 ToString.free_types
@@ -267,7 +265,6 @@ ToString.primitive
 ToString.tostring_unsee_ttv_if_array
 ToString.toStringDetailed2
 ToString.toStringErrorPack
-ToString.toStringNamedFunction_map
 TryUnifyTests.members_of_failed_typepack_unification_are_unified_with_errorType
 TryUnifyTests.result_of_failed_typepack_unification_is_constrained
 TryUnifyTests.uninhabited_table_sub_anything
@@ -302,10 +299,10 @@ TypeInfer.cli_50041_committing_txnlog_in_apollo_client_error
 TypeInfer.dont_ice_when_failing_the_occurs_check
 TypeInfer.dont_report_type_errors_within_an_AstExprError
 TypeInfer.dont_report_type_errors_within_an_AstStatError
+TypeInfer.follow_on_new_types_in_substitution
 TypeInfer.globals
 TypeInfer.globals2
 TypeInfer.globals_are_banned_in_strict_mode
-TypeInfer.infer_locals_via_assignment_from_its_call_site
 TypeInfer.infer_through_group_expr
 TypeInfer.no_stack_overflow_from_isoptional
 TypeInfer.promote_tail_type_packs
@@ -320,6 +317,8 @@ TypeInferAnyError.any_type_propagates
 TypeInferAnyError.assign_prop_to_table_by_calling_any_yields_any
 TypeInferAnyError.call_to_any_yields_any
 TypeInferAnyError.can_subscript_any
+TypeInferAnyError.for_in_loop_iterator_is_error
+TypeInferAnyError.for_in_loop_iterator_is_error2
 TypeInferAnyError.metatable_of_any_can_be_a_table
 TypeInferAnyError.quantify_any_does_not_bind_to_itself
 TypeInferAnyError.replace_every_free_type_when_unifying_a_complex_function_with_any
@@ -357,6 +356,7 @@ TypeInferFunctions.function_exprs_are_generalized_at_signature_scope_not_enclosi
 TypeInferFunctions.function_is_supertype_of_concrete_functions
 TypeInferFunctions.function_statement_sealed_table_assignment_through_indexer
 TypeInferFunctions.generic_packs_are_not_variadic
+TypeInferFunctions.higher_order_function_3
 TypeInferFunctions.higher_order_function_4
 TypeInferFunctions.improved_function_arg_mismatch_error_nonstrict
 TypeInferFunctions.improved_function_arg_mismatch_errors
@@ -419,7 +419,6 @@ TypeInferModules.do_not_modify_imported_types
 TypeInferModules.do_not_modify_imported_types_5
 TypeInferModules.require
 TypeInferOOP.CheckMethodsOfSealed
-TypeInferOOP.cycle_between_object_constructor_and_alias
 TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_another_overload_works
 TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_it_wont_help_2
 TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_not_defined_with_colon