Mirror of https://github.com/luau-lang/luau.git (synced 2024-12-12 13:00:38 +00:00)
Sync to upstream/release/631 (#1299)
### What's new

* Added lint warning for using redundant `@native` attributes on functions inside a `--!native` module
* Improved typechecking speed in old solver for modules with large types

### New Solver

* Fixed the length type function sealing the table prematurely
* Fixed crashes caused by general table indexing expressions

### VM

* Added support for a specialized 3-argument fast-call instruction to improve performance of `vector` constructor, `buffer` writes and a few `bit32` methods

---

### Internal Contributors

Co-authored-by: Aaron Weiss <aaronweiss@roblox.com>
Co-authored-by: Andy Friesen <afriesen@roblox.com>
Co-authored-by: Aviral Goel <agoel@roblox.com>
Co-authored-by: Vighnesh Vijay <vvijay@roblox.com>
Co-authored-by: Vyacheslav Egorov <vegorov@roblox.com>

---------

Co-authored-by: Aaron Weiss <aaronweiss@roblox.com>
Co-authored-by: Alexander McCord <amccord@roblox.com>
Co-authored-by: Andy Friesen <afriesen@roblox.com>
Co-authored-by: Vighnesh <vvijay@roblox.com>
Co-authored-by: Aviral Goel <agoel@roblox.com>
Co-authored-by: David Cope <dcope@roblox.com>
Co-authored-by: Lily Brown <lbrown@roblox.com>
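As an illustration of the first bullet, here is a minimal, hypothetical Luau module (not taken from this commit) showing the pattern the new `RedundantNativeAttribute` lint targets: an `@native` attribute on a function inside a module that already opts into native code generation with the `--!native` hot-comment. The warning name and message come from the linter changes in the diff below; the surrounding code is only a sketch.

```lua
--!native
-- The hot-comment above already requests native code generation for the whole module.

-- The new lint (LintWarning::Code_RedundantNativeAttribute) reports the attribute below as redundant:
-- "native attribute on a function is redundant in a native module; consider removing it"
@native
local function sum(n: number): number
    local total = 0
    for i = 1, n do
        total += i
    end
    return total
end

print(sum(100))
```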
parent 7d4033071a
commit caee04d82d
63 changed files with 1433 additions and 1160 deletions
@@ -300,7 +300,7 @@ public:
  * @returns a non-free type that generalizes the argument, or `std::nullopt` if one
  * does not exist
  */
- std::optional<TypeId> generalizeFreeType(NotNull<Scope> scope, TypeId type);
+ std::optional<TypeId> generalizeFreeType(NotNull<Scope> scope, TypeId type, bool avoidSealingTables = false);

 /**
  * Checks the existing set of constraints to see if there exist any that contain

@@ -8,6 +8,6 @@
 namespace Luau
 {

- std::optional<TypeId> generalize(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope, NotNull<DenseHashSet<TypeId>> bakedTypes, TypeId ty);
+ std::optional<TypeId> generalize(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope,
+     NotNull<DenseHashSet<TypeId>> bakedTypes, TypeId ty, /* avoid sealing tables*/ bool avoidSealingTables = false);
 }

@@ -27,12 +27,16 @@ struct ReplaceGenerics : Substitution
 {
 }

+ void resetState(const TxnLog* log, TypeArena* arena, NotNull<BuiltinTypes> builtinTypes, TypeLevel level, Scope* scope,
+     const std::vector<TypeId>& generics, const std::vector<TypePackId>& genericPacks);
+
 NotNull<BuiltinTypes> builtinTypes;

 TypeLevel level;
 Scope* scope;
 std::vector<TypeId> generics;
 std::vector<TypePackId> genericPacks;

 bool ignoreChildren(TypeId ty) override;
 bool isDirty(TypeId ty) override;
 bool isDirty(TypePackId tp) override;
@@ -48,13 +52,19 @@ struct Instantiation : Substitution
 , builtinTypes(builtinTypes)
 , level(level)
 , scope(scope)
+ , reusableReplaceGenerics(log, arena, builtinTypes, level, scope, {}, {})
 {
 }

+ void resetState(const TxnLog* log, TypeArena* arena, NotNull<BuiltinTypes> builtinTypes, TypeLevel level, Scope* scope);
+
 NotNull<BuiltinTypes> builtinTypes;

 TypeLevel level;
 Scope* scope;

+ ReplaceGenerics reusableReplaceGenerics;
+
 bool ignoreChildren(TypeId ty) override;
 bool isDirty(TypeId ty) override;
 bool isDirty(TypePackId tp) override;

@@ -134,7 +134,8 @@ struct Tarjan
 TarjanResult visitRoot(TypeId ty);
 TarjanResult visitRoot(TypePackId ty);

- void clearTarjan();
+ // Used to reuse the object for a new operation
+ void clearTarjan(const TxnLog* log);

 // Get/set the dirty bit for an index (grows the vector if needed)
 bool getDirty(int index);

@@ -212,6 +213,8 @@ public:
 std::optional<TypeId> substitute(TypeId ty);
 std::optional<TypePackId> substitute(TypePackId tp);

+ void resetState(const TxnLog* log, TypeArena* arena);
+
 TypeId replace(TypeId ty);
 TypePackId replace(TypePackId tp);
@@ -82,8 +82,8 @@ struct TypeFamilyReductionResult
 };

 template<typename T>
- using ReducerFunction = std::function<TypeFamilyReductionResult<T>(T, const std::vector<TypeId>&, const std::vector<TypePackId>&,
-     NotNull<TypeFamilyContext>)>;
+ using ReducerFunction =
+     std::function<TypeFamilyReductionResult<T>(T, const std::vector<TypeId>&, const std::vector<TypePackId>&, NotNull<TypeFamilyContext>)>;

 /// Represents a type function that may be applied to map a series of types and
 /// type packs to a single output type.

@@ -4,6 +4,7 @@
 #include "Luau/Anyification.h"
 #include "Luau/ControlFlow.h"
 #include "Luau/Error.h"
+ #include "Luau/Instantiation.h"
 #include "Luau/Module.h"
 #include "Luau/Predicate.h"
 #include "Luau/Substitution.h"

@@ -362,6 +363,8 @@ public:
 UnifierSharedState unifierState;
 Normalizer normalizer;

+ Instantiation reusableInstantiation;
+
 std::vector<RequireCycle> requireCycles;

 // Type inference limits
@@ -256,21 +256,44 @@ void registerBuiltinGlobals(Frontend& frontend, GlobalTypes& globals, bool typeC

 TypeId tableMetaMT = arena.addType(MetatableType{tabTy, genericMT});

+ // getmetatable : <MT>({ @metatable MT, {+ +} }) -> MT
 addGlobalBinding(globals, "getmetatable", makeFunction(arena, std::nullopt, {genericMT}, {}, {tableMetaMT}, {genericMT}), "@luau");

- // clang-format off
- // setmetatable<T: {}, MT>(T, MT) -> { @metatable MT, T }
- addGlobalBinding(globals, "setmetatable",
-     arena.addType(
-         FunctionType{
-             {genericMT},
-             {},
-             arena.addTypePack(TypePack{{tabTy, genericMT}}),
-             arena.addTypePack(TypePack{{tableMetaMT}})
-         }
-     ), "@luau"
- );
- // clang-format on
+ if (FFlag::DebugLuauDeferredConstraintResolution)
+ {
+     TypeId genericT = arena.addType(GenericType{"T"});
+     TypeId tMetaMT = arena.addType(MetatableType{genericT, genericMT});
+
+     // clang-format off
+     // setmetatable<T: {}, MT>(T, MT) -> { @metatable MT, T }
+     addGlobalBinding(globals, "setmetatable",
+         arena.addType(
+             FunctionType{
+                 {genericT, genericMT},
+                 {},
+                 arena.addTypePack(TypePack{{genericT, genericMT}}),
+                 arena.addTypePack(TypePack{{tMetaMT}})
+             }
+         ), "@luau"
+     );
+     // clang-format on
+ }
+ else
+ {
+     // clang-format off
+     // setmetatable<T: {}, MT>(T, MT) -> { @metatable MT, T }
+     addGlobalBinding(globals, "setmetatable",
+         arena.addType(
+             FunctionType{
+                 {genericMT},
+                 {},
+                 arena.addTypePack(TypePack{{tabTy, genericMT}}),
+                 arena.addTypePack(TypePack{{tableMetaMT}})
+             }
+         ), "@luau"
+     );
+     // clang-format on
+ }

 for (const auto& pair : globals.globalScope->bindings)
 {
@@ -787,7 +787,7 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatLocal* stat

 auto uc = addConstraint(scope, statLocal->location, UnpackConstraint{valueTypes, rvaluePack});

- for (TypeId t: valueTypes)
+ for (TypeId t : valueTypes)
     getMutable<BlockedType>(t)->setOwner(uc);
 }

@@ -920,7 +920,7 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatForIn* forI
 auto iterable = addConstraint(
     loopScope, getLocation(forIn->values), IterableConstraint{iterator, variableTypes, forIn->values.data[0], &module->astForInNextTypes});

- for (TypeId var: variableTypes)
+ for (TypeId var : variableTypes)
 {
 auto bt = getMutable<BlockedType>(var);
 LUAU_ASSERT(bt);

@@ -1171,7 +1171,7 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatAssign* ass

 auto uc = addConstraint(scope, assign->location, UnpackConstraint{valueTypes, resultPack});

- for (TypeId t: valueTypes)
+ for (TypeId t : valueTypes)
     getMutable<BlockedType>(t)->setOwner(uc);
 }

@@ -694,7 +694,7 @@ bool ConstraintSolver::tryDispatch(const GeneralizationConstraint& c, NotNull<co
 }

 for (TypeId ty : c.interiorTypes)
-     generalize(NotNull{arena}, builtinTypes, constraint->scope, generalizedTypes, ty);
+     generalize(NotNull{arena}, builtinTypes, constraint->scope, generalizedTypes, ty, /* avoidSealingTables */ false);

 return true;
 }
@@ -769,13 +769,8 @@ bool ConstraintSolver::tryDispatch(const IterableConstraint& c, NotNull<const Co
 {
 TypeId keyTy = freshType(arena, builtinTypes, constraint->scope);
 TypeId valueTy = freshType(arena, builtinTypes, constraint->scope);
- TypeId tableTy = arena->addType(TableType{
-     TableType::Props{},
-     TableIndexer{keyTy, valueTy},
-     TypeLevel{},
-     constraint->scope,
-     TableState::Free
- });
+ TypeId tableTy =
+     arena->addType(TableType{TableType::Props{}, TableIndexer{keyTy, valueTy}, TypeLevel{}, constraint->scope, TableState::Free});

 unify(constraint, nextTy, tableTy);

@@ -1022,13 +1017,10 @@ bool ConstraintSolver::tryDispatch(const TypeAliasExpansionConstraint& c, NotNul
 const TableType* tfTable = getTableType(tf->type);

 //clang-format off
- bool needsClone =
-     follow(tf->type) == target ||
-     (tfTable != nullptr && tfTable == getTableType(target)) ||
-     std::any_of(typeArguments.begin(), typeArguments.end(), [&](const auto& other) {
-         return other == target;
-     }
- );
+ bool needsClone = follow(tf->type) == target || (tfTable != nullptr && tfTable == getTableType(target)) ||
+     std::any_of(typeArguments.begin(), typeArguments.end(), [&](const auto& other) {
+         return other == target;
+     });
 //clang-format on

 // Only tables have the properties we're trying to set.
@@ -1446,6 +1438,8 @@ bool ConstraintSolver::tryDispatchHasIndexer(
 bind(constraint, resultType, tbl->indexer->indexResultType);
 return true;
 }
+ else if (auto mt = get<MetatableType>(follow(ft->upperBound)))
+     return tryDispatchHasIndexer(recursionDepth, constraint, mt->table, indexType, resultType, seen);

 FreeType freeResult{ft->scope, builtinTypes->neverType, builtinTypes->unknownType};
 emplace<FreeType>(constraint, resultType, freeResult);

@@ -1461,11 +1455,11 @@ bool ConstraintSolver::tryDispatchHasIndexer(
 if (auto indexer = tt->indexer)
 {
 unify(constraint, indexType, indexer->indexType);

 bind(constraint, resultType, indexer->indexResultType);
 return true;
 }
- else if (tt->state == TableState::Unsealed)
+
+ if (tt->state == TableState::Unsealed)
 {
 // FIXME this is greedy.
@@ -2067,7 +2061,7 @@ bool ConstraintSolver::tryDispatchIterableTable(TypeId iteratorTy, const Iterabl
 }

 auto unpack = [&](TypeId ty) {
- for (TypeId varTy: c.variables)
+ for (TypeId varTy : c.variables)
 {
 LUAU_ASSERT(get<BlockedType>(varTy));
 LUAU_ASSERT(varTy != ty);

@@ -2228,11 +2222,12 @@ bool ConstraintSolver::tryDispatchIterableFunction(
 return true;
 }

- NotNull<const Constraint> ConstraintSolver::unpackAndAssign(const std::vector<TypeId> destTypes, TypePackId srcTypes, NotNull<const Constraint> constraint)
+ NotNull<const Constraint> ConstraintSolver::unpackAndAssign(
+     const std::vector<TypeId> destTypes, TypePackId srcTypes, NotNull<const Constraint> constraint)
 {
 auto c = pushConstraint(constraint->scope, constraint->location, UnpackConstraint{destTypes, srcTypes});

- for (TypeId t: destTypes)
+ for (TypeId t : destTypes)
 {
 BlockedType* bt = getMutable<BlockedType>(t);
 LUAU_ASSERT(bt);
@@ -2834,7 +2829,7 @@ void ConstraintSolver::shiftReferences(TypeId source, TypeId target)
 targetRefs += count;
 }

- std::optional<TypeId> ConstraintSolver::generalizeFreeType(NotNull<Scope> scope, TypeId type)
+ std::optional<TypeId> ConstraintSolver::generalizeFreeType(NotNull<Scope> scope, TypeId type, bool avoidSealingTables)
 {
 TypeId t = follow(type);
 if (get<FreeType>(t))

@@ -2849,7 +2844,7 @@ std::optional<TypeId> ConstraintSolver::generalizeFreeType(NotNull<Scope> scope,
 // that until all constraint generation is complete.
 }

- return generalize(NotNull{arena}, builtinTypes, scope, generalizedTypes, type);
+ return generalize(NotNull{arena}, builtinTypes, scope, generalizedTypes, type, avoidSealingTables);
 }

 bool ConstraintSolver::hasUnresolvedConstraints(TypeId ty)
@@ -65,21 +65,15 @@ namespace Luau
 {

 // this list of binary operator type families is used for better stringification of type families errors
- static const std::unordered_map<std::string, const char*> kBinaryOps{
-     {"add", "+"}, {"sub", "-"}, {"mul", "*"}, {"div", "/"}, {"idiv", "//"}, {"pow", "^"}, {"mod", "%"}, {"concat", ".."}, {"and", "and"},
-     {"or", "or"}, {"lt", "< or >="}, {"le", "<= or >"}, {"eq", "== or ~="}
- };
+ static const std::unordered_map<std::string, const char*> kBinaryOps{{"add", "+"}, {"sub", "-"}, {"mul", "*"}, {"div", "/"}, {"idiv", "//"},
+     {"pow", "^"}, {"mod", "%"}, {"concat", ".."}, {"and", "and"}, {"or", "or"}, {"lt", "< or >="}, {"le", "<= or >"}, {"eq", "== or ~="}};

 // this list of unary operator type families is used for better stringification of type families errors
- static const std::unordered_map<std::string, const char*> kUnaryOps{
-     {"unm", "-"}, {"len", "#"}, {"not", "not"}
- };
+ static const std::unordered_map<std::string, const char*> kUnaryOps{{"unm", "-"}, {"len", "#"}, {"not", "not"}};

 // this list of type families will receive a special error indicating that the user should file a bug on the GitHub repository
 // putting a type family in this list indicates that it is expected to _always_ reduce
- static const std::unordered_set<std::string> kUnreachableTypeFamilies{
-     "refine", "singleton", "union", "intersect"
- };
+ static const std::unordered_set<std::string> kUnreachableTypeFamilies{"refine", "singleton", "union", "intersect"};

 struct ErrorConverter
 {

@@ -682,7 +676,7 @@ struct ErrorConverter
 if (kUnreachableTypeFamilies.count(tfit->family->name))
 {
 return "Type family instance " + Luau::toString(e.ty) + " is uninhabited\n" +
     "This is likely to be a bug, please report it at https://github.com/luau-lang/luau/issues";
 }

 // Everything should be specialized above to report a more descriptive error that hopefully does not mention "type families" explicitly.
@@ -24,15 +24,17 @@ struct MutatingGeneralizer : TypeOnceVisitor
 std::vector<TypePackId> genericPacks;

 bool isWithinFunction = false;
+ bool avoidSealingTables = false;

- MutatingGeneralizer(NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope, NotNull<DenseHashSet<TypeId>> cachedTypes, DenseHashMap<const void*, size_t> positiveTypes,
-     DenseHashMap<const void*, size_t> negativeTypes)
+ MutatingGeneralizer(NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope, NotNull<DenseHashSet<TypeId>> cachedTypes,
+     DenseHashMap<const void*, size_t> positiveTypes, DenseHashMap<const void*, size_t> negativeTypes, bool avoidSealingTables)
     : TypeOnceVisitor(/* skipBoundTypes */ true)
     , builtinTypes(builtinTypes)
     , scope(scope)
     , cachedTypes(cachedTypes)
     , positiveTypes(std::move(positiveTypes))
     , negativeTypes(std::move(negativeTypes))
+     , avoidSealingTables(avoidSealingTables)
 {
 }

@@ -268,7 +270,8 @@ struct MutatingGeneralizer : TypeOnceVisitor
 TableType* tt = getMutable<TableType>(ty);
 LUAU_ASSERT(tt);

- tt->state = TableState::Sealed;
+ if (!avoidSealingTables)
+     tt->state = TableState::Sealed;

 return true;
 }
@@ -338,31 +341,31 @@ struct FreeTypeSearcher : TypeVisitor
 {
 switch (polarity)
 {
 case Positive:
 {
 if (seenPositive.contains(ty))
     return true;

 seenPositive.insert(ty);
 return false;
 }
 case Negative:
 {
 if (seenNegative.contains(ty))
     return true;

 seenNegative.insert(ty);
 return false;
 }
 case Both:
 {
 if (seenPositive.contains(ty) && seenNegative.contains(ty))
     return true;

 seenPositive.insert(ty);
 seenNegative.insert(ty);
 return false;
 }
 }

 return false;
@@ -519,7 +522,8 @@ struct TypeCacher : TypeOnceVisitor
 explicit TypeCacher(NotNull<DenseHashSet<TypeId>> cachedTypes)
     : TypeOnceVisitor(/* skipBoundTypes */ true)
     , cachedTypes(cachedTypes)
- {}
+ {
+ }

 void cache(TypeId ty)
 {

@@ -611,7 +615,7 @@ struct TypeCacher : TypeOnceVisitor

 traverse(ft.argTypes);
 traverse(ft.retTypes);
- for (TypeId gen: ft.generics)
+ for (TypeId gen : ft.generics)
     traverse(gen);

 bool uncacheable = false;

@@ -622,7 +626,7 @@ struct TypeCacher : TypeOnceVisitor
 else if (isUncacheable(ft.retTypes))
     uncacheable = true;

- for (TypeId argTy: ft.argTypes)
+ for (TypeId argTy : ft.argTypes)
 {
 if (isUncacheable(argTy))
 {

@@ -631,7 +635,7 @@ struct TypeCacher : TypeOnceVisitor
 }
 }

- for (TypeId retTy: ft.retTypes)
+ for (TypeId retTy : ft.retTypes)
 {
 if (isUncacheable(retTy))
 {

@@ -640,7 +644,7 @@ struct TypeCacher : TypeOnceVisitor
 }
 }

- for (TypeId g: ft.generics)
+ for (TypeId g : ft.generics)
 {
 if (isUncacheable(g))
 {
@@ -863,7 +867,8 @@ struct TypeCacher : TypeOnceVisitor
 }
 };

- std::optional<TypeId> generalize(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope, NotNull<DenseHashSet<TypeId>> cachedTypes, TypeId ty)
+ std::optional<TypeId> generalize(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtinTypes, NotNull<Scope> scope,
+     NotNull<DenseHashSet<TypeId>> cachedTypes, TypeId ty, bool avoidSealingTables)
 {
 ty = follow(ty);

@@ -876,14 +881,14 @@ std::optional<TypeId> generalize(NotNull<TypeArena> arena, NotNull<BuiltinTypes>
 FreeTypeSearcher fts{scope, cachedTypes};
 fts.traverse(ty);

- MutatingGeneralizer gen{builtinTypes, scope, cachedTypes, std::move(fts.positiveTypes), std::move(fts.negativeTypes)};
+ MutatingGeneralizer gen{builtinTypes, scope, cachedTypes, std::move(fts.positiveTypes), std::move(fts.negativeTypes), avoidSealingTables};

 gen.traverse(ty);

 /* MutatingGeneralizer mutates types in place, so it is possible that ty has
  * been transmuted to a BoundType. We must follow it again and verify that
  * we are allowed to mutate it before we attach generics to it.
  */
 ty = follow(ty);

 if (ty->owningArena != arena || ty->persistent)
@@ -11,10 +11,23 @@
 #include <algorithm>

 LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
+ LUAU_FASTFLAG(LuauReusableSubstitutions)

 namespace Luau
 {

+ void Instantiation::resetState(const TxnLog* log, TypeArena* arena, NotNull<BuiltinTypes> builtinTypes, TypeLevel level, Scope* scope)
+ {
+     LUAU_ASSERT(FFlag::LuauReusableSubstitutions);
+
+     Substitution::resetState(log, arena);
+
+     this->builtinTypes = builtinTypes;
+
+     this->level = level;
+     this->scope = scope;
+ }
+
 bool Instantiation::isDirty(TypeId ty)
 {
 if (const FunctionType* ftv = log->getMutable<FunctionType>(ty))

@@ -58,13 +71,26 @@ TypeId Instantiation::clean(TypeId ty)
 clone.argNames = ftv->argNames;
 TypeId result = addType(std::move(clone));

- // Annoyingly, we have to do this even if there are no generics,
- // to replace any generic tables.
- ReplaceGenerics replaceGenerics{log, arena, builtinTypes, level, scope, ftv->generics, ftv->genericPacks};
+ if (FFlag::LuauReusableSubstitutions)
+ {
+     // Annoyingly, we have to do this even if there are no generics,
+     // to replace any generic tables.
+     reusableReplaceGenerics.resetState(log, arena, builtinTypes, level, scope, ftv->generics, ftv->genericPacks);

     // TODO: What to do if this returns nullopt?
     // We don't have access to the error-reporting machinery
- result = replaceGenerics.substitute(result).value_or(result);
+     result = reusableReplaceGenerics.substitute(result).value_or(result);
+ }
+ else
+ {
+     // Annoyingly, we have to do this even if there are no generics,
+     // to replace any generic tables.
+     ReplaceGenerics replaceGenerics{log, arena, builtinTypes, level, scope, ftv->generics, ftv->genericPacks};
+
+     // TODO: What to do if this returns nullopt?
+     // We don't have access to the error-reporting machinery
+     result = replaceGenerics.substitute(result).value_or(result);
+ }

 asMutable(result)->documentationSymbol = ty->documentationSymbol;
 return result;
@@ -76,6 +102,22 @@ TypePackId Instantiation::clean(TypePackId tp)
 return tp;
 }

+ void ReplaceGenerics::resetState(const TxnLog* log, TypeArena* arena, NotNull<BuiltinTypes> builtinTypes, TypeLevel level, Scope* scope,
+     const std::vector<TypeId>& generics, const std::vector<TypePackId>& genericPacks)
+ {
+     LUAU_ASSERT(FFlag::LuauReusableSubstitutions);
+
+     Substitution::resetState(log, arena);
+
+     this->builtinTypes = builtinTypes;
+
+     this->level = level;
+     this->scope = scope;
+
+     this->generics = generics;
+     this->genericPacks = genericPacks;
+ }
+
 bool ReplaceGenerics::ignoreChildren(TypeId ty)
 {
 if (const FunctionType* ftv = log->getMutable<FunctionType>(ty))
@@ -16,6 +16,11 @@ LUAU_FASTINTVARIABLE(LuauSuggestionDistance, 4)

 LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)

+ LUAU_FASTFLAG(LuauAttributeSyntax)
+ LUAU_FASTFLAG(LuauAttribute)
+ LUAU_FASTFLAG(LuauNativeAttribute)
+ LUAU_FASTFLAGVARIABLE(LintRedundantNativeAttribute, false)
+
 namespace Luau
 {

@@ -2922,6 +2927,64 @@ static void lintComments(LintContext& context, const std::vector<HotComment>& ho
 }
 }

+ static bool hasNativeCommentDirective(const std::vector<HotComment>& hotcomments)
+ {
+     LUAU_ASSERT(FFlag::LuauAttributeSyntax);
+     LUAU_ASSERT(FFlag::LuauNativeAttribute);
+     LUAU_ASSERT(FFlag::LintRedundantNativeAttribute);
+
+     for (const HotComment& hc : hotcomments)
+     {
+         if (hc.content.empty() || hc.content[0] == ' ' || hc.content[0] == '\t')
+             continue;
+
+         if (hc.header)
+         {
+             size_t space = hc.content.find_first_of(" \t");
+             std::string_view first = std::string_view(hc.content).substr(0, space);
+
+             if (first == "native")
+                 return true;
+         }
+     }
+
+     return false;
+ }
+
+ struct LintRedundantNativeAttribute : AstVisitor
+ {
+ public:
+     LUAU_NOINLINE static void process(LintContext& context)
+     {
+         LUAU_ASSERT(FFlag::LuauAttributeSyntax);
+         LUAU_ASSERT(FFlag::LuauNativeAttribute);
+         LUAU_ASSERT(FFlag::LintRedundantNativeAttribute);
+
+         LintRedundantNativeAttribute pass;
+         pass.context = &context;
+         context.root->visit(&pass);
+     }
+
+ private:
+     LintContext* context;
+
+     bool visit(AstExprFunction* node) override
+     {
+         node->body->visit(this);
+
+         for (const auto attribute : node->attributes)
+         {
+             if (attribute->type == AstAttr::Type::Native)
+             {
+                 emitWarning(*context, LintWarning::Code_RedundantNativeAttribute, attribute->location,
+                     "native attribute on a function is redundant in a native module; consider removing it");
+             }
+         }
+
+         return false;
+     }
+ };
+
 std::vector<LintWarning> lint(AstStat* root, const AstNameTable& names, const ScopePtr& env, const Module* module,
     const std::vector<HotComment>& hotcomments, const LintOptions& options)
 {
@@ -3008,6 +3071,13 @@ std::vector<LintWarning> lint(AstStat* root, const AstNameTable& names, const Sc
 if (context.warningEnabled(LintWarning::Code_ComparisonPrecedence))
     LintComparisonPrecedence::process(context);

+ if (FFlag::LuauAttributeSyntax && FFlag::LuauNativeAttribute && FFlag::LintRedundantNativeAttribute &&
+     context.warningEnabled(LintWarning::Code_RedundantNativeAttribute))
+ {
+     if (hasNativeCommentDirective(hotcomments))
+         LintRedundantNativeAttribute::process(context);
+ }
+
 std::sort(context.result.begin(), context.result.end(), WarningComparator());

 return context.result;
@@ -11,6 +11,7 @@
 LUAU_FASTINTVARIABLE(LuauTarjanChildLimit, 10000)
 LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution);
 LUAU_FASTINTVARIABLE(LuauTarjanPreallocationSize, 256);
+ LUAU_FASTFLAG(LuauReusableSubstitutions)

 namespace Luau
 {

@@ -146,6 +147,8 @@ static TypeId shallowClone(TypeId ty, TypeArena& dest, const TxnLog* log, bool a
 }

 Tarjan::Tarjan()
+     : typeToIndex(nullptr, FFlag::LuauReusableSubstitutions ? FInt::LuauTarjanPreallocationSize : 0)
+     , packToIndex(nullptr, FFlag::LuauReusableSubstitutions ? FInt::LuauTarjanPreallocationSize : 0)
 {
 nodes.reserve(FInt::LuauTarjanPreallocationSize);
 stack.reserve(FInt::LuauTarjanPreallocationSize);

@@ -446,14 +449,31 @@ TarjanResult Tarjan::visitRoot(TypePackId tp)
 return loop();
 }

- void Tarjan::clearTarjan()
+ void Tarjan::clearTarjan(const TxnLog* log)
 {
- typeToIndex.clear();
- packToIndex.clear();
+ if (FFlag::LuauReusableSubstitutions)
+ {
+     typeToIndex.clear(~0u);
+     packToIndex.clear(~0u);
+ }
+ else
+ {
+     typeToIndex.clear();
+     packToIndex.clear();
+ }

 nodes.clear();

 stack.clear();

+ if (FFlag::LuauReusableSubstitutions)
+ {
+     childCount = 0;
+     // childLimit setting stays the same
+
+     this->log = log;
+ }
+
 edgesTy.clear();
 edgesTp.clear();
 worklist.clear();
@@ -528,7 +548,6 @@ Substitution::Substitution(const TxnLog* log_, TypeArena* arena)
 {
 log = log_;
 LUAU_ASSERT(log);
- LUAU_ASSERT(arena);
 }

 void Substitution::dontTraverseInto(TypeId ty)

@@ -546,7 +565,7 @@ std::optional<TypeId> Substitution::substitute(TypeId ty)
 ty = log->follow(ty);

 // clear algorithm state for reentrancy
- clearTarjan();
+ clearTarjan(log);

 auto result = findDirty(ty);
 if (result != TarjanResult::Ok)

@@ -579,7 +598,7 @@ std::optional<TypePackId> Substitution::substitute(TypePackId tp)
 tp = log->follow(tp);

 // clear algorithm state for reentrancy
- clearTarjan();
+ clearTarjan(log);

 auto result = findDirty(tp);
 if (result != TarjanResult::Ok)
@@ -607,6 +626,23 @@ std::optional<TypePackId> Substitution::substitute(TypePackId tp)
 return newTp;
 }

+ void Substitution::resetState(const TxnLog* log, TypeArena* arena)
+ {
+     LUAU_ASSERT(FFlag::LuauReusableSubstitutions);
+
+     clearTarjan(log);
+
+     this->arena = arena;
+
+     newTypes.clear();
+     newPacks.clear();
+     replacedTypes.clear();
+     replacedTypePacks.clear();
+
+     noTraverseTypes.clear();
+     noTraverseTypePacks.clear();
+ }
+
 TypeId Substitution::clone(TypeId ty)
 {
 return shallowClone(ty, *arena, log, /* alwaysClone */ true);
@@ -1540,6 +1540,24 @@ struct TypeChecker2
 visitExprName(indexName->expr, indexName->location, indexName->index.value, context, builtinTypes->stringType);
 }

+ void indexExprMetatableHelper(AstExprIndexExpr* indexExpr, const MetatableType* metaTable, TypeId exprType, TypeId indexType)
+ {
+     if (auto tt = get<TableType>(follow(metaTable->table)); tt && tt->indexer)
+         testIsSubtype(indexType, tt->indexer->indexType, indexExpr->index->location);
+     else if (auto mt = get<MetatableType>(follow(metaTable->table)))
+         indexExprMetatableHelper(indexExpr, mt, exprType, indexType);
+     else if (auto tmt = get<TableType>(follow(metaTable->metatable)); tmt && tmt->indexer)
+         testIsSubtype(indexType, tmt->indexer->indexType, indexExpr->index->location);
+     else if (auto mtmt = get<MetatableType>(follow(metaTable->metatable)))
+         indexExprMetatableHelper(indexExpr, mtmt, exprType, indexType);
+     else
+     {
+         LUAU_ASSERT(tt || get<PrimitiveType>(follow(metaTable->table)));
+
+         reportError(CannotExtendTable{exprType, CannotExtendTable::Indexer, "indexer??"}, indexExpr->location);
+     }
+ }
+
 void visit(AstExprIndexExpr* indexExpr, ValueContext context)
 {
 if (auto str = indexExpr->index->as<AstExprConstantString>())

@@ -1565,15 +1583,7 @@ struct TypeChecker2
 }
 else if (auto mt = get<MetatableType>(exprType))
 {
-     const TableType* tt = get<TableType>(follow(mt->table));
-     LUAU_ASSERT(tt);
-     if (tt->indexer)
-         testIsSubtype(indexType, tt->indexer->indexType, indexExpr->index->location);
-     else
-     {
-         // TODO: Maybe the metatable has a suitable indexer?
-         reportError(CannotExtendTable{exprType, CannotExtendTable::Indexer, "indexer??"}, indexExpr->location);
-     }
+     return indexExprMetatableHelper(indexExpr, mt, exprType, indexType);
 }
 else if (auto cls = get<ClassType>(exprType))
 {
@@ -344,8 +344,7 @@ struct FamilyReducer
 if (tryGuessing(subject))
     return;

- TypeFamilyReductionResult<TypeId> result =
-     tfit->family->reducer(subject, tfit->typeArguments, tfit->packArguments, NotNull{&ctx});
+ TypeFamilyReductionResult<TypeId> result = tfit->family->reducer(subject, tfit->typeArguments, tfit->packArguments, NotNull{&ctx});
 handleFamilyReduction(subject, result);
 }
 }

@@ -369,8 +368,7 @@ struct FamilyReducer
 if (tryGuessing(subject))
     return;

- TypeFamilyReductionResult<TypePackId> result =
-     tfit->family->reducer(subject, tfit->typeArguments, tfit->packArguments, NotNull{&ctx});
+ TypeFamilyReductionResult<TypePackId> result = tfit->family->reducer(subject, tfit->typeArguments, tfit->packArguments, NotNull{&ctx});
 handleFamilyReduction(subject, result);
 }
 }

@@ -451,8 +449,8 @@ bool isPending(TypeId ty, ConstraintSolver* solver)
 }

 template<typename F, typename... Args>
- static std::optional<TypeFamilyReductionResult<TypeId>> tryDistributeTypeFamilyApp(F f, TypeId instance,
-     const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx, Args&& ...args)
+ static std::optional<TypeFamilyReductionResult<TypeId>> tryDistributeTypeFamilyApp(F f, TypeId instance, const std::vector<TypeId>& typeParams,
+     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx, Args&&... args)
 {
 // op (a | b) (c | d) ~ (op a (c | d)) | (op b (c | d)) ~ (op a c) | (op a d) | (op b c) | (op b d)
 bool uninhabited = false;
@@ -527,8 +525,8 @@ static std::optional<TypeFamilyReductionResult<TypeId>> tryDistributeTypeFamilyA
 return std::nullopt;
 }

- TypeFamilyReductionResult<TypeId> notFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> notFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 1 || !packParams.empty())
 {

@@ -551,8 +549,8 @@ TypeFamilyReductionResult<TypeId> notFamilyFn(TypeId instance, const std::vector
 return {ctx->builtins->booleanType, false, {}, {}};
 }

- TypeFamilyReductionResult<TypeId> lenFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> lenFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 1 || !packParams.empty())
 {

@@ -573,7 +571,7 @@ TypeFamilyReductionResult<TypeId> lenFamilyFn(TypeId instance, const std::vector
 // if the type is free but has only one remaining reference, we can generalize it to its upper bound here.
 if (ctx->solver)
 {
- std::optional<TypeId> maybeGeneralized = ctx->solver->generalizeFreeType(ctx->scope, operandTy);
+ std::optional<TypeId> maybeGeneralized = ctx->solver->generalizeFreeType(ctx->scope, operandTy, /* avoidSealingTables */ true);
 if (!maybeGeneralized)
     return {std::nullopt, false, {operandTy}, {}};
 operandTy = *maybeGeneralized;
@@ -643,8 +641,8 @@ TypeFamilyReductionResult<TypeId> lenFamilyFn(TypeId instance, const std::vector
 return {ctx->builtins->numberType, false, {}, {}};
 }

- TypeFamilyReductionResult<TypeId> unmFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> unmFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 1 || !packParams.empty())
 {

@@ -846,8 +844,8 @@ TypeFamilyReductionResult<TypeId> numericBinopFamilyFn(TypeId instance, const st
 return {extracted.head.front(), false, {}, {}};
 }

- TypeFamilyReductionResult<TypeId> addFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> addFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -858,8 +856,8 @@ TypeFamilyReductionResult<TypeId> addFamilyFn(TypeId instance, const std::vector
 return numericBinopFamilyFn(instance, typeParams, packParams, ctx, "__add");
 }

- TypeFamilyReductionResult<TypeId> subFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> subFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -870,8 +868,8 @@ TypeFamilyReductionResult<TypeId> subFamilyFn(TypeId instance, const std::vector
 return numericBinopFamilyFn(instance, typeParams, packParams, ctx, "__sub");
 }

- TypeFamilyReductionResult<TypeId> mulFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> mulFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -882,8 +880,8 @@ TypeFamilyReductionResult<TypeId> mulFamilyFn(TypeId instance, const std::vector
 return numericBinopFamilyFn(instance, typeParams, packParams, ctx, "__mul");
 }

- TypeFamilyReductionResult<TypeId> divFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> divFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -894,8 +892,8 @@ TypeFamilyReductionResult<TypeId> divFamilyFn(TypeId instance, const std::vector
 return numericBinopFamilyFn(instance, typeParams, packParams, ctx, "__div");
 }

- TypeFamilyReductionResult<TypeId> idivFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> idivFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -906,8 +904,8 @@ TypeFamilyReductionResult<TypeId> idivFamilyFn(TypeId instance, const std::vecto
 return numericBinopFamilyFn(instance, typeParams, packParams, ctx, "__idiv");
 }

- TypeFamilyReductionResult<TypeId> powFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> powFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -918,8 +916,8 @@ TypeFamilyReductionResult<TypeId> powFamilyFn(TypeId instance, const std::vector
 return numericBinopFamilyFn(instance, typeParams, packParams, ctx, "__pow");
 }

- TypeFamilyReductionResult<TypeId> modFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> modFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -930,8 +928,8 @@ TypeFamilyReductionResult<TypeId> modFamilyFn(TypeId instance, const std::vector
 return numericBinopFamilyFn(instance, typeParams, packParams, ctx, "__mod");
 }

- TypeFamilyReductionResult<TypeId> concatFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> concatFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {
@@ -1038,8 +1036,8 @@ TypeFamilyReductionResult<TypeId> concatFamilyFn(TypeId instance, const std::vec
 return {ctx->builtins->stringType, false, {}, {}};
 }

- TypeFamilyReductionResult<TypeId> andFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> andFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -1089,8 +1087,8 @@ TypeFamilyReductionResult<TypeId> andFamilyFn(TypeId instance, const std::vector
 return {overallResult.result, false, std::move(blockedTypes), {}};
 }

- TypeFamilyReductionResult<TypeId> orFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> orFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -1279,8 +1277,8 @@ static TypeFamilyReductionResult<TypeId> comparisonFamilyFn(TypeId instance, con
 return {ctx->builtins->booleanType, false, {}, {}};
 }

- TypeFamilyReductionResult<TypeId> ltFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> ltFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -1291,8 +1289,8 @@ TypeFamilyReductionResult<TypeId> ltFamilyFn(TypeId instance, const std::vector<
 return comparisonFamilyFn(instance, typeParams, packParams, ctx, "__lt");
 }

- TypeFamilyReductionResult<TypeId> leFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> leFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {

@@ -1303,8 +1301,8 @@ TypeFamilyReductionResult<TypeId> leFamilyFn(TypeId instance, const std::vector<
 return comparisonFamilyFn(instance, typeParams, packParams, ctx, "__le");
 }

- TypeFamilyReductionResult<TypeId> eqFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
-     const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
+ TypeFamilyReductionResult<TypeId> eqFamilyFn(
+     TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
 {
 if (typeParams.size() != 2 || !packParams.empty())
 {
@ -1434,8 +1432,8 @@ struct FindRefinementBlockers : TypeOnceVisitor
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
TypeFamilyReductionResult<TypeId> refineFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
|
TypeFamilyReductionResult<TypeId> refineFamilyFn(
|
||||||
const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
||||||
{
|
{
|
||||||
if (typeParams.size() != 2 || !packParams.empty())
|
if (typeParams.size() != 2 || !packParams.empty())
|
||||||
{
|
{
|
||||||
|
@ -1519,8 +1517,8 @@ TypeFamilyReductionResult<TypeId> refineFamilyFn(TypeId instance, const std::vec
|
||||||
return {resultTy, false, {}, {}};
|
return {resultTy, false, {}, {}};
|
||||||
}
|
}
|
||||||
|
|
||||||
TypeFamilyReductionResult<TypeId> singletonFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
|
TypeFamilyReductionResult<TypeId> singletonFamilyFn(
|
||||||
const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
||||||
{
|
{
|
||||||
if (typeParams.size() != 1 || !packParams.empty())
|
if (typeParams.size() != 1 || !packParams.empty())
|
||||||
{
|
{
|
||||||
|
@ -1556,8 +1554,8 @@ TypeFamilyReductionResult<TypeId> singletonFamilyFn(TypeId instance, const std::
|
||||||
return {ctx->builtins->unknownType, false, {}, {}};
|
return {ctx->builtins->unknownType, false, {}, {}};
|
||||||
}
|
}
|
||||||
|
|
||||||
TypeFamilyReductionResult<TypeId> unionFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
|
TypeFamilyReductionResult<TypeId> unionFamilyFn(
|
||||||
const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
||||||
{
|
{
|
||||||
if (!packParams.empty())
|
if (!packParams.empty())
|
||||||
{
|
{
|
||||||
|
@ -1617,8 +1615,8 @@ TypeFamilyReductionResult<TypeId> unionFamilyFn(TypeId instance, const std::vect
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
TypeFamilyReductionResult<TypeId> intersectFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
|
TypeFamilyReductionResult<TypeId> intersectFamilyFn(
|
||||||
const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
||||||
{
|
{
|
||||||
if (!packParams.empty())
|
if (!packParams.empty())
|
||||||
{
|
{
|
||||||
|
@ -1841,8 +1839,8 @@ TypeFamilyReductionResult<TypeId> keyofFamilyImpl(
|
||||||
return {ctx->arena->addType(UnionType{singletons}), false, {}, {}};
|
return {ctx->arena->addType(UnionType{singletons}), false, {}, {}};
|
||||||
}
|
}
|
||||||
|
|
||||||
TypeFamilyReductionResult<TypeId> keyofFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
|
TypeFamilyReductionResult<TypeId> keyofFamilyFn(
|
||||||
const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
||||||
{
|
{
|
||||||
if (typeParams.size() != 1 || !packParams.empty())
|
if (typeParams.size() != 1 || !packParams.empty())
|
||||||
{
|
{
|
||||||
|
@ -1853,8 +1851,8 @@ TypeFamilyReductionResult<TypeId> keyofFamilyFn(TypeId instance, const std::vect
|
||||||
return keyofFamilyImpl(typeParams, packParams, ctx, /* isRaw */ false);
|
return keyofFamilyImpl(typeParams, packParams, ctx, /* isRaw */ false);
|
||||||
}
|
}
|
||||||
|
|
||||||
TypeFamilyReductionResult<TypeId> rawkeyofFamilyFn(TypeId instance, const std::vector<TypeId>& typeParams,
|
TypeFamilyReductionResult<TypeId> rawkeyofFamilyFn(
|
||||||
const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
TypeId instance, const std::vector<TypeId>& typeParams, const std::vector<TypePackId>& packParams, NotNull<TypeFamilyContext> ctx)
|
||||||
{
|
{
|
||||||
if (typeParams.size() != 1 || !packParams.empty())
|
if (typeParams.size() != 1 || !packParams.empty())
|
||||||
{
|
{
|
||||||
|
|
|
@@ -33,11 +33,11 @@ LUAU_FASTFLAG(LuauKnowsTheDataModel3)
 LUAU_FASTFLAGVARIABLE(DebugLuauFreezeDuringUnification, false)
 LUAU_FASTFLAGVARIABLE(DebugLuauSharedSelf, false)
 LUAU_FASTFLAG(LuauInstantiateInSubtyping)
-LUAU_FASTFLAGVARIABLE(LuauMetatableInstantiationCloneCheck, false)
 LUAU_FASTFLAGVARIABLE(LuauTinyControlFlowAnalysis, false)
 LUAU_FASTFLAGVARIABLE(LuauAlwaysCommitInferencesOfFunctionCalls, false)
 LUAU_FASTFLAGVARIABLE(LuauRemoveBadRelationalOperatorWarning, false)
 LUAU_FASTFLAGVARIABLE(LuauOkWithIteratingOverTableProperties, false)
+LUAU_FASTFLAGVARIABLE(LuauReusableSubstitutions, false)
 
 namespace Luau
 {
@@ -214,6 +214,7 @@ TypeChecker::TypeChecker(const ScopePtr& globalScope, ModuleResolver* resolver,
     , iceHandler(iceHandler)
     , unifierState(iceHandler)
     , normalizer(nullptr, builtinTypes, NotNull{&unifierState})
+    , reusableInstantiation(TxnLog::empty(), nullptr, builtinTypes, {}, nullptr)
     , nilType(builtinTypes->nilType)
     , numberType(builtinTypes->numberType)
     , stringType(builtinTypes->stringType)
@@ -4865,12 +4866,27 @@ TypeId TypeChecker::instantiate(const ScopePtr& scope, TypeId ty, Location locat
     if (ftv && ftv->hasNoFreeOrGenericTypes)
         return ty;
 
-    Instantiation instantiation{log, &currentModule->internalTypes, builtinTypes, scope->level, /*scope*/ nullptr};
+    std::optional<TypeId> instantiated;
 
-    if (instantiationChildLimit)
-        instantiation.childLimit = *instantiationChildLimit;
+    if (FFlag::LuauReusableSubstitutions)
+    {
+        reusableInstantiation.resetState(log, &currentModule->internalTypes, builtinTypes, scope->level, /*scope*/ nullptr);
+
+        if (instantiationChildLimit)
+            reusableInstantiation.childLimit = *instantiationChildLimit;
+
+        instantiated = reusableInstantiation.substitute(ty);
+    }
+    else
+    {
+        Instantiation instantiation{log, &currentModule->internalTypes, builtinTypes, scope->level, /*scope*/ nullptr};
+
+        if (instantiationChildLimit)
+            instantiation.childLimit = *instantiationChildLimit;
+
+        instantiated = instantiation.substitute(ty);
+    }
 
-    std::optional<TypeId> instantiated = instantiation.substitute(ty);
-
     if (instantiated.has_value())
         return *instantiated;
     else
@@ -5619,8 +5635,8 @@ TypeId TypeChecker::instantiateTypeFun(const ScopePtr& scope, const TypeFun& tf,
     TypeId instantiated = *maybeInstantiated;
 
     TypeId target = follow(instantiated);
-    const TableType* tfTable = FFlag::LuauMetatableInstantiationCloneCheck ? getTableType(tf.type) : nullptr;
-    bool needsClone = follow(tf.type) == target || (FFlag::LuauMetatableInstantiationCloneCheck && tfTable != nullptr && tfTable == getTableType(target));
+    const TableType* tfTable = getTableType(tf.type);
+    bool needsClone = follow(tf.type) == target || (tfTable != nullptr && tfTable == getTableType(target));
     bool shouldMutate = getTableType(tf.type);
     TableType* ttv = getMutableTableType(target);
 
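The instantiate hunk above switches from constructing a fresh Instantiation on every call to resetting and reusing a member object. A minimal standalone sketch of that reset-and-reuse pattern follows; the class, field, and method names here are illustrative stand-ins, not the real Luau substitution types.

```cpp
#include <unordered_map>

// Illustrative stand-in for a substitution pass that keeps per-run scratch state.
struct ReusableSubstitution
{
    std::unordered_map<int, int> newIds; // scratch state rebuilt each run
    int level = 0;

    // Instead of constructing a fresh object per call, callers clear the scratch
    // state and update parameters, in the spirit of ReplaceGenerics::resetState.
    void resetState(int newLevel)
    {
        newIds.clear();
        level = newLevel;
    }

    int substitute(int ty)
    {
        auto it = newIds.find(ty);
        if (it == newIds.end())
            it = newIds.emplace(ty, ty + level).first;
        return it->second;
    }
};

int main()
{
    ReusableSubstitution subst;

    for (int run = 0; run < 3; ++run)
    {
        subst.resetState(run); // cheap compared to reallocating the internal tables
        subst.substitute(42);
    }
}
```

The point of the design is to keep hash-table capacity and other allocations warm across many instantiations instead of paying for them on every call.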
@@ -974,8 +974,7 @@ void Unifier::tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionTyp
     if (!subNorm || !superNorm)
         reportError(location, NormalizationTooComplex{});
     else if ((failedOptionCount == 1 || foundHeuristic) && failedOption)
-        tryUnifyNormalizedTypes(
-            subTy, superTy, *subNorm, *superNorm, "None of the union options are compatible. For example:", *failedOption);
+        tryUnifyNormalizedTypes(subTy, superTy, *subNorm, *superNorm, "None of the union options are compatible. For example:", *failedOption);
     else
         tryUnifyNormalizedTypes(subTy, superTy, *subNorm, *superNorm, "none of the union options are compatible");
 }
@@ -479,7 +479,7 @@ bool Unifier2::unify(const FunctionType* subFn, const AnyType* superAny)
 
 bool Unifier2::unify(const AnyType* subAny, const TableType* superTable)
 {
-    for (const auto& [propName, prop]: superTable->props)
+    for (const auto& [propName, prop] : superTable->props)
     {
         if (prop.readTy)
             unify(builtinTypes->anyType, *prop.readTy);
@@ -499,7 +499,7 @@ bool Unifier2::unify(const AnyType* subAny, const TableType* superTable)
 
 bool Unifier2::unify(const TableType* subTable, const AnyType* superAny)
 {
-    for (const auto& [propName, prop]: subTable->props)
+    for (const auto& [propName, prop] : subTable->props)
     {
         if (prop.readTy)
             unify(*prop.readTy, builtinTypes->anyType);
@@ -658,31 +658,31 @@ struct FreeTypeSearcher : TypeVisitor
     {
         switch (polarity)
         {
         case Positive:
         {
             if (seenPositive.contains(ty))
                 return true;
 
             seenPositive.insert(ty);
             return false;
         }
         case Negative:
         {
             if (seenNegative.contains(ty))
                 return true;
 
             seenNegative.insert(ty);
             return false;
         }
         case Both:
         {
             if (seenPositive.contains(ty) && seenNegative.contains(ty))
                 return true;
 
             seenPositive.insert(ty);
             seenNegative.insert(ty);
             return false;
         }
         }
 
         return false;
@@ -326,13 +326,12 @@ enum class IrCmd : uint8_t
     // This is used to recover after calling a variadic function
     ADJUST_STACK_TO_TOP,
 
-    // Execute fastcall builtin function in-place
+    // Execute fastcall builtin function with 1 argument in-place
+    // This is used for a few builtins that can have more than 1 result and cannot be represented as a regular instruction
     // A: unsigned int (builtin id)
     // B: Rn (result start)
-    // C: Rn (argument start)
-    // D: Rn or Kn or undef (optional second argument)
-    // E: int (argument count)
-    // F: int (result count)
+    // C: Rn (first argument)
+    // D: int (result count)
     FASTCALL,
 
     // Call the fastcall builtin function
@@ -340,8 +339,9 @@ enum class IrCmd : uint8_t
     // B: Rn (result start)
     // C: Rn (argument start)
     // D: Rn or Kn or undef (optional second argument)
-    // E: int (argument count or -1 to use all arguments up to stack top)
-    // F: int (result count or -1 to preserve all results and adjust stack top)
+    // E: Rn or Kn or undef (optional third argument)
+    // F: int (argument count or -1 to use all arguments up to stack top)
+    // G: int (result count or -1 to preserve all results and adjust stack top)
     INVOKE_FASTCALL,
 
     // Check that fastcall builtin function invocation was successful (negative result count jumps to fallback)
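Read together, the operand comments above describe the new INVOKE_FASTCALL layout: operands A through E carry the builtin id, result register, and up to three argument operands, while F and G carry the argument and result counts. A small sketch of pulling those operands apart follows; the struct and field names are placeholders, not the real IrInst/IrOp types from Luau/IrData.h.

```cpp
#include <cstdint>

// Placeholder operand/instruction shapes for illustration only.
struct FakeOp
{
    int index = 0;
    bool isUndef = true;
};

struct FakeInst
{
    FakeOp a, b, c, d, e, f, g;
};

struct InvokeFastcallView
{
    int builtinId;     // A: unsigned int (builtin id)
    int resultStart;   // B: Rn (result start)
    int argStart;      // C: Rn (argument start)
    bool hasSecondArg; // D: Rn or Kn or undef
    bool hasThirdArg;  // E: Rn or Kn or undef (the new third argument slot)
    int argCount;      // F: -1 means "use all arguments up to stack top"
    int resultCount;   // G: -1 means "preserve all results and adjust stack top"
};

static InvokeFastcallView decode(const FakeInst& inst)
{
    return InvokeFastcallView{
        inst.a.index, inst.b.index, inst.c.index, !inst.d.isUndef, !inst.e.isUndef, inst.f.index, inst.g.index};
}

int main()
{
    FakeInst inst;
    inst.f.index = 3;  // three fixed arguments
    inst.g.index = -1; // variadic result count
    InvokeFastcallView view = decode(inst);
    return view.argCount == 3 && view.resultCount == -1 ? 0 : 1;
}
```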
@@ -64,6 +64,7 @@ inline bool isFastCall(LuauOpcode op)
     case LOP_FASTCALL1:
     case LOP_FASTCALL2:
     case LOP_FASTCALL2K:
+    case LOP_FASTCALL3:
         return true;
 
     default:
@@ -4,7 +4,7 @@
 #include "Luau/Common.h"
 #include "Luau/IrData.h"
 
-LUAU_FASTFLAG(LuauCodegenRemoveDeadStores5)
+LUAU_FASTFLAG(LuauCodegenFastcall3)
 
 namespace Luau
 {
@@ -112,12 +112,48 @@ static void visitVmRegDefsUses(T& visitor, IrFunction& function, const IrInst& i
         visitor.useRange(vmRegOp(inst.a), function.intOp(inst.b));
         break;
 
-    // TODO: FASTCALL is more restrictive than INVOKE_FASTCALL; we should either determine the exact semantics, or rework it
     case IrCmd::FASTCALL:
-    case IrCmd::INVOKE_FASTCALL:
-        if (int count = function.intOp(inst.e); count != -1)
+        if (FFlag::LuauCodegenFastcall3)
         {
-            if (count >= 3)
+            visitor.use(inst.c);
+
+            if (int nresults = function.intOp(inst.d); nresults != -1)
+                visitor.defRange(vmRegOp(inst.b), nresults);
+        }
+        else
+        {
+            if (int count = function.intOp(inst.e); count != -1)
+            {
+                if (count >= 3)
+                {
+                    CODEGEN_ASSERT(inst.d.kind == IrOpKind::VmReg && vmRegOp(inst.d) == vmRegOp(inst.c) + 1);
+
+                    visitor.useRange(vmRegOp(inst.c), count);
+                }
+                else
+                {
+                    if (count >= 1)
+                        visitor.use(inst.c);
+
+                    if (count >= 2)
+                        visitor.maybeUse(inst.d); // Argument can also be a VmConst
+                }
+            }
+            else
+            {
+                visitor.useVarargs(vmRegOp(inst.c));
+            }
+
+            // Multiple return sequences (count == -1) are defined by ADJUST_STACK_TO_REG
+            if (int count = function.intOp(inst.f); count != -1)
+                visitor.defRange(vmRegOp(inst.b), count);
+        }
+        break;
+    case IrCmd::INVOKE_FASTCALL:
+        if (int count = function.intOp(FFlag::LuauCodegenFastcall3 ? inst.f : inst.e); count != -1)
+        {
+            // Only LOP_FASTCALL3 lowering is allowed to have third optional argument
+            if (count >= 3 && (!FFlag::LuauCodegenFastcall3 || inst.e.kind == IrOpKind::Undef))
             {
                 CODEGEN_ASSERT(inst.d.kind == IrOpKind::VmReg && vmRegOp(inst.d) == vmRegOp(inst.c) + 1);
 
@@ -130,6 +166,9 @@ static void visitVmRegDefsUses(T& visitor, IrFunction& function, const IrInst& i
 
                 if (count >= 2)
                     visitor.maybeUse(inst.d); // Argument can also be a VmConst
+
+                if (FFlag::LuauCodegenFastcall3 && count >= 3)
+                    visitor.maybeUse(inst.e); // Argument can also be a VmConst
             }
         }
         else
@@ -138,7 +177,7 @@ static void visitVmRegDefsUses(T& visitor, IrFunction& function, const IrInst& i
         }
 
         // Multiple return sequences (count == -1) are defined by ADJUST_STACK_TO_REG
-        if (int count = function.intOp(inst.f); count != -1)
+        if (int count = function.intOp(FFlag::LuauCodegenFastcall3 ? inst.g : inst.f); count != -1)
             visitor.defRange(vmRegOp(inst.b), count);
         break;
     case IrCmd::FORGLOOP:
@@ -188,15 +227,8 @@ static void visitVmRegDefsUses(T& visitor, IrFunction& function, const IrInst& i
         visitor.def(inst.b);
         break;
     case IrCmd::FALLBACK_FORGPREP:
-        if (FFlag::LuauCodegenRemoveDeadStores5)
-        {
-            // This instruction doesn't always redefine Rn, Rn+1, Rn+2, so we have to mark it as implicit use
-            visitor.useRange(vmRegOp(inst.b), 3);
-        }
-        else
-        {
-            visitor.use(inst.b);
-        }
+        // This instruction doesn't always redefine Rn, Rn+1, Rn+2, so we have to mark it as implicit use
+        visitor.useRange(vmRegOp(inst.b), 3);
 
         visitor.defRange(vmRegOp(inst.b), 3);
         break;
@@ -214,12 +246,6 @@ static void visitVmRegDefsUses(T& visitor, IrFunction& function, const IrInst& i
         visitor.use(inst.a);
         break;
 
-    // After optimizations with DebugLuauAbortingChecks enabled, CHECK_TAG Rn, tag, block instructions are generated
-    case IrCmd::CHECK_TAG:
-        if (!FFlag::LuauCodegenRemoveDeadStores5)
-            visitor.maybeUse(inst.a);
-        break;
-
     default:
         // All instructions which reference registers have to be handled explicitly
        CODEGEN_ASSERT(inst.a.kind != IrOpKind::VmReg);
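The visitor calls in the hunk above (use, maybeUse, useRange, defRange, useVarargs) are the whole protocol that dataflow passes see. A compact sketch of a visitor that simply records which VM registers an instruction reads and writes, using the same method names but plain integer registers instead of IrOp operands:

```cpp
#include <cstdio>
#include <vector>

// Simplified visitor: the real one receives IrOp operands; here plain register indices.
struct RegCollector
{
    std::vector<int> uses;
    std::vector<int> defs;

    void use(int reg)
    {
        uses.push_back(reg);
    }

    void useRange(int reg, int count)
    {
        for (int i = 0; i < count; i++)
            uses.push_back(reg + i);
    }

    void defRange(int reg, int count)
    {
        for (int i = 0; i < count; i++)
            defs.push_back(reg + i);
    }
};

int main()
{
    RegCollector visitor;

    // Mirrors the flagged FASTCALL handling above: one argument register is read,
    // and 'nresults' registers starting at the result register are written.
    visitor.use(2);
    visitor.defRange(0, 2);

    printf("uses=%zu defs=%zu\n", visitor.uses.size(), visitor.defs.size());
}
```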
@@ -11,30 +11,19 @@
 
 #include <algorithm>
 
-LUAU_FASTFLAG(LuauCodegenDirectUserdataFlow)
-LUAU_FASTFLAG(LuauLoadTypeInfo) // Because new VM typeinfo load changes the format used by Codegen, same flag is used
-LUAU_FASTFLAGVARIABLE(LuauCodegenTypeInfo, false) // New analysis is flagged separately
 LUAU_FASTFLAGVARIABLE(LuauCodegenAnalyzeHostVectorOps, false)
 LUAU_FASTFLAGVARIABLE(LuauCodegenLoadTypeUpvalCheck, false)
 LUAU_FASTFLAGVARIABLE(LuauCodegenUserdataOps, false)
+LUAU_FASTFLAGVARIABLE(LuauCodegenFastcall3, false)
 
 namespace Luau
 {
 namespace CodeGen
 {
 
-static bool hasTypedParameters(Proto* proto)
-{
-    CODEGEN_ASSERT(!FFlag::LuauLoadTypeInfo);
-
-    return proto->typeinfo && proto->numparams != 0;
-}
-
 template<typename T>
 static T read(uint8_t* data, size_t& offset)
 {
-    CODEGEN_ASSERT(FFlag::LuauLoadTypeInfo);
-
     T result;
     memcpy(&result, data + offset, sizeof(T));
     offset += sizeof(T);
@@ -44,8 +33,6 @@ static T read(uint8_t* data, size_t& offset)
 
 static uint32_t readVarInt(uint8_t* data, size_t& offset)
 {
-    CODEGEN_ASSERT(FFlag::LuauLoadTypeInfo);
-
     uint32_t result = 0;
     uint32_t shift = 0;
 
@@ -63,8 +50,6 @@ static uint32_t readVarInt(uint8_t* data, size_t& offset)
 
 void loadBytecodeTypeInfo(IrFunction& function)
 {
-    CODEGEN_ASSERT(FFlag::LuauLoadTypeInfo);
-
     Proto* proto = function.proto;
 
     if (!proto)
@@ -173,8 +158,6 @@ static void prepareRegTypeInfoLookups(BytecodeTypeInfo& typeInfo)
 
 static BytecodeRegTypeInfo* findRegType(BytecodeTypeInfo& info, uint8_t reg, int pc)
 {
-    CODEGEN_ASSERT(FFlag::LuauCodegenTypeInfo);
-
     auto b = info.regTypes.begin() + info.regTypeOffsets[reg];
     auto e = info.regTypes.begin() + info.regTypeOffsets[reg + 1];
 
@@ -199,8 +182,6 @@ static BytecodeRegTypeInfo* findRegType(BytecodeTypeInfo& info, uint8_t reg, int
 
 static void refineRegType(BytecodeTypeInfo& info, uint8_t reg, int pc, uint8_t ty)
 {
-    CODEGEN_ASSERT(FFlag::LuauCodegenTypeInfo);
-
     if (ty != LBC_TYPE_ANY)
     {
         if (BytecodeRegTypeInfo* regType = findRegType(info, reg, pc))
@@ -219,8 +200,6 @@ static void refineRegType(BytecodeTypeInfo& info, uint8_t reg, int pc, uint8_t t
 
 static void refineUpvalueType(BytecodeTypeInfo& info, int up, uint8_t ty)
 {
-    CODEGEN_ASSERT(FFlag::LuauCodegenTypeInfo);
-
     if (ty != LBC_TYPE_ANY)
     {
         if (size_t(up) < info.upvalueTypes.size())
@@ -662,28 +641,12 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
 
         // At the block start, reset or knowledge to the starting state
         // In the future we might be able to propagate some info between the blocks as well
-        if (FFlag::LuauLoadTypeInfo)
+        for (size_t i = 0; i < bcTypeInfo.argumentTypes.size(); i++)
         {
-            for (size_t i = 0; i < bcTypeInfo.argumentTypes.size(); i++)
-            {
-                uint8_t et = bcTypeInfo.argumentTypes[i];
+            uint8_t et = bcTypeInfo.argumentTypes[i];
 
-                // TODO: if argument is optional, this might force a VM exit unnecessarily
-                regTags[i] = et & ~LBC_TYPE_OPTIONAL_BIT;
-            }
-        }
-        else
-        {
-            if (hasTypedParameters(proto))
-            {
-                for (int i = 0; i < proto->numparams; ++i)
-                {
-                    uint8_t et = proto->typeinfo[2 + i];
-
-                    // TODO: if argument is optional, this might force a VM exit unnecessarily
-                    regTags[i] = et & ~LBC_TYPE_OPTIONAL_BIT;
-                }
-            }
+            // TODO: if argument is optional, this might force a VM exit unnecessarily
+            regTags[i] = et & ~LBC_TYPE_OPTIONAL_BIT;
         }
 
         for (int i = proto->numparams; i < proto->maxstacksize; ++i)
@@ -696,16 +659,13 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
         const Instruction* pc = &proto->code[i];
         LuauOpcode op = LuauOpcode(LUAU_INSN_OP(*pc));
 
-        if (FFlag::LuauCodegenTypeInfo)
+        // Assign known register types from local type information
+        // TODO: this is an expensive walk for each instruction
+        // TODO: it's best to lookup when register is actually used in the instruction
+        for (BytecodeRegTypeInfo& el : bcTypeInfo.regTypes)
        {
-            // Assign known register types from local type information
-            // TODO: this is an expensive walk for each instruction
-            // TODO: it's best to lookup when register is actually used in the instruction
-            for (BytecodeRegTypeInfo& el : bcTypeInfo.regTypes)
-            {
-                if (el.type != LBC_TYPE_ANY && i >= el.startpc && i < el.endpc)
-                    regTags[el.reg] = el.type;
-            }
+            if (el.type != LBC_TYPE_ANY && i >= el.startpc && i < el.endpc)
+                regTags[el.reg] = el.type;
         }
 
         BytecodeTypes& bcType = function.bcTypes[i];
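readVarInt above (now unconditional after the flag removal) reads a variable-length integer from the bytecode type-info blob, seven payload bits per byte with the high bit marking continuation. A standalone sketch of the same scheme, decoding a hypothetical buffer rather than real bytecode data:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Mirrors the shape of readVarInt: 7 payload bits per byte, high bit set on all
// but the final byte of the encoded value.
static uint32_t readVarIntSketch(const uint8_t* data, size_t& offset)
{
    uint32_t result = 0;
    uint32_t shift = 0;

    uint8_t byte;
    do
    {
        byte = data[offset++];
        result |= uint32_t(byte & 127) << shift;
        shift += 7;
    } while (byte & 128);

    return result;
}

int main()
{
    const uint8_t encoded[] = {0xE5, 0x8E, 0x26}; // LEB128 encoding of 624485
    size_t offset = 0;
    printf("%u\n", readVarIntSketch(encoded, offset)); // prints 624485
}
```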
@@ -727,8 +687,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
             regTags[ra] = LBC_TYPE_BOOLEAN;
             bcType.result = regTags[ra];
 
-            if (FFlag::LuauCodegenTypeInfo)
-                refineRegType(bcTypeInfo, ra, i, bcType.result);
+            refineRegType(bcTypeInfo, ra, i, bcType.result);
             break;
         }
         case LOP_LOADN:
@@ -737,8 +696,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
             regTags[ra] = LBC_TYPE_NUMBER;
             bcType.result = regTags[ra];
 
-            if (FFlag::LuauCodegenTypeInfo)
-                refineRegType(bcTypeInfo, ra, i, bcType.result);
+            refineRegType(bcTypeInfo, ra, i, bcType.result);
             break;
         }
         case LOP_LOADK:
@@ -749,8 +707,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
             regTags[ra] = bcType.a;
             bcType.result = regTags[ra];
 
-            if (FFlag::LuauCodegenTypeInfo)
-                refineRegType(bcTypeInfo, ra, i, bcType.result);
+            refineRegType(bcTypeInfo, ra, i, bcType.result);
             break;
         }
         case LOP_LOADKX:
@@ -761,8 +718,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
             regTags[ra] = bcType.a;
             bcType.result = regTags[ra];
 
-            if (FFlag::LuauCodegenTypeInfo)
-                refineRegType(bcTypeInfo, ra, i, bcType.result);
+            refineRegType(bcTypeInfo, ra, i, bcType.result);
             break;
         }
         case LOP_MOVE:
@@ -773,8 +729,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
             regTags[ra] = regTags[rb];
             bcType.result = regTags[ra];
 
-            if (FFlag::LuauCodegenTypeInfo)
-                refineRegType(bcTypeInfo, ra, i, bcType.result);
+            refineRegType(bcTypeInfo, ra, i, bcType.result);
             break;
         }
         case LOP_GETTABLE:
@@ -1142,8 +1097,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
             regTags[ra + 3] = bcType.c;
             regTags[ra] = bcType.result;
 
-            if (FFlag::LuauCodegenTypeInfo)
-                refineRegType(bcTypeInfo, ra, i, bcType.result);
+            refineRegType(bcTypeInfo, ra, i, bcType.result);
             break;
         }
         case LOP_FASTCALL1:
@@ -1161,8 +1115,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
             regTags[LUAU_INSN_B(*pc)] = bcType.a;
             regTags[ra] = bcType.result;
 
-            if (FFlag::LuauCodegenTypeInfo)
-                refineRegType(bcTypeInfo, ra, i, bcType.result);
+            refineRegType(bcTypeInfo, ra, i, bcType.result);
             break;
         }
         case LOP_FASTCALL2:
@@ -1180,8 +1133,29 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
             regTags[int(pc[1])] = bcType.b;
             regTags[ra] = bcType.result;
 
-            if (FFlag::LuauCodegenTypeInfo)
-                refineRegType(bcTypeInfo, ra, i, bcType.result);
+            refineRegType(bcTypeInfo, ra, i, bcType.result);
+            break;
+        }
+        case LOP_FASTCALL3:
+        {
+            CODEGEN_ASSERT(FFlag::LuauCodegenFastcall3);
+
+            int bfid = LUAU_INSN_A(*pc);
+            int skip = LUAU_INSN_C(*pc);
+            int aux = pc[1];
+
+            Instruction call = pc[skip + 1];
+            CODEGEN_ASSERT(LUAU_INSN_OP(call) == LOP_CALL);
+            int ra = LUAU_INSN_A(call);
+
+            applyBuiltinCall(bfid, bcType);
+
+            regTags[LUAU_INSN_B(*pc)] = bcType.a;
+            regTags[aux & 0xff] = bcType.b;
+            regTags[(aux >> 8) & 0xff] = bcType.c;
+            regTags[ra] = bcType.result;
+
+            refineRegType(bcTypeInfo, ra, i, bcType.result);
             break;
         }
         case LOP_FORNPREP:
@@ -1192,12 +1166,9 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
             regTags[ra + 1] = LBC_TYPE_NUMBER;
             regTags[ra + 2] = LBC_TYPE_NUMBER;
 
-            if (FFlag::LuauCodegenTypeInfo)
-            {
-                refineRegType(bcTypeInfo, ra, i, regTags[ra]);
-                refineRegType(bcTypeInfo, ra + 1, i, regTags[ra + 1]);
-                refineRegType(bcTypeInfo, ra + 2, i, regTags[ra + 2]);
-            }
+            refineRegType(bcTypeInfo, ra, i, regTags[ra]);
+            refineRegType(bcTypeInfo, ra + 1, i, regTags[ra + 1]);
+            refineRegType(bcTypeInfo, ra + 2, i, regTags[ra + 2]);
 
             break;
         }
         case LOP_FORNLOOP:
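The LOP_FASTCALL3 case above reads the builtin id, first argument register, and skip distance from the instruction word, and then takes the second and third argument registers from the low bytes of the aux word (aux & 0xff and (aux >> 8) & 0xff). A small sketch of that unpacking follows; the insnA/insnB/insnC helpers are hypothetical stand-ins for the LUAU_INSN_* accessor macros.

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical stand-ins for LUAU_INSN_A/B/C; the real macros live in the VM headers.
static int insnA(uint32_t insn) { return (insn >> 8) & 0xff; }
static int insnB(uint32_t insn) { return (insn >> 16) & 0xff; }
static int insnC(uint32_t insn) { return (insn >> 24) & 0xff; }

struct Fastcall3Args
{
    int builtinId; // A
    int arg1;      // B: first argument register
    int skip;      // C: distance to the CALL instruction this fastcall guards
    int arg2;      // aux & 0xff
    int arg3;      // (aux >> 8) & 0xff
};

static Fastcall3Args decodeFastcall3(uint32_t insn, uint32_t aux)
{
    return Fastcall3Args{insnA(insn), insnB(insn), insnC(insn), int(aux & 0xff), int((aux >> 8) & 0xff)};
}

int main()
{
    Fastcall3Args args = decodeFastcall3(0x02030400u /* opcode byte left as zero */, 0x0605u);
    printf("arg registers: %d %d %d\n", args.arg1, args.arg2, args.arg3);
}
```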
}
|
}
|
||||||
case LOP_NAMECALL:
|
case LOP_NAMECALL:
|
||||||
{
|
{
|
||||||
if (FFlag::LuauCodegenDirectUserdataFlow)
|
int ra = LUAU_INSN_A(*pc);
|
||||||
|
int rb = LUAU_INSN_B(*pc);
|
||||||
|
uint32_t kc = pc[1];
|
||||||
|
|
||||||
|
bcType.a = regTags[rb];
|
||||||
|
bcType.b = getBytecodeConstantTag(proto, kc);
|
||||||
|
|
||||||
|
// While namecall might result in a callable table, we assume the function fast path
|
||||||
|
regTags[ra] = LBC_TYPE_FUNCTION;
|
||||||
|
|
||||||
|
// Namecall places source register into target + 1
|
||||||
|
regTags[ra + 1] = bcType.a;
|
||||||
|
|
||||||
|
bcType.result = LBC_TYPE_FUNCTION;
|
||||||
|
|
||||||
|
if (FFlag::LuauCodegenUserdataOps)
|
||||||
{
|
{
|
||||||
int ra = LUAU_INSN_A(*pc);
|
TString* str = gco2ts(function.proto->k[kc].value.gc);
|
||||||
int rb = LUAU_INSN_B(*pc);
|
const char* field = getstr(str);
|
||||||
uint32_t kc = pc[1];
|
|
||||||
|
|
||||||
bcType.a = regTags[rb];
|
if (FFlag::LuauCodegenAnalyzeHostVectorOps && bcType.a == LBC_TYPE_VECTOR && hostHooks.vectorNamecallBytecodeType)
|
||||||
bcType.b = getBytecodeConstantTag(proto, kc);
|
knownNextCallResult = LuauBytecodeType(hostHooks.vectorNamecallBytecodeType(field, str->len));
|
||||||
|
else if (isCustomUserdataBytecodeType(bcType.a) && hostHooks.userdataNamecallBytecodeType)
|
||||||
// While namecall might result in a callable table, we assume the function fast path
|
knownNextCallResult = LuauBytecodeType(hostHooks.userdataNamecallBytecodeType(bcType.a, field, str->len));
|
||||||
regTags[ra] = LBC_TYPE_FUNCTION;
|
}
|
||||||
|
else
|
||||||
// Namecall places source register into target + 1
|
{
|
||||||
regTags[ra + 1] = bcType.a;
|
if (FFlag::LuauCodegenAnalyzeHostVectorOps && bcType.a == LBC_TYPE_VECTOR && hostHooks.vectorNamecallBytecodeType)
|
||||||
|
|
||||||
bcType.result = LBC_TYPE_FUNCTION;
|
|
||||||
|
|
||||||
if (FFlag::LuauCodegenUserdataOps)
|
|
||||||
{
|
{
|
||||||
TString* str = gco2ts(function.proto->k[kc].value.gc);
|
TString* str = gco2ts(function.proto->k[kc].value.gc);
|
||||||
const char* field = getstr(str);
|
const char* field = getstr(str);
|
||||||
|
|
||||||
if (FFlag::LuauCodegenAnalyzeHostVectorOps && bcType.a == LBC_TYPE_VECTOR && hostHooks.vectorNamecallBytecodeType)
|
knownNextCallResult = LuauBytecodeType(hostHooks.vectorNamecallBytecodeType(field, str->len));
|
||||||
knownNextCallResult = LuauBytecodeType(hostHooks.vectorNamecallBytecodeType(field, str->len));
|
|
||||||
else if (isCustomUserdataBytecodeType(bcType.a) && hostHooks.userdataNamecallBytecodeType)
|
|
||||||
knownNextCallResult = LuauBytecodeType(hostHooks.userdataNamecallBytecodeType(bcType.a, field, str->len));
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
if (FFlag::LuauCodegenAnalyzeHostVectorOps && bcType.a == LBC_TYPE_VECTOR && hostHooks.vectorNamecallBytecodeType)
|
|
||||||
{
|
|
||||||
TString* str = gco2ts(function.proto->k[kc].value.gc);
|
|
||||||
const char* field = getstr(str);
|
|
||||||
|
|
||||||
knownNextCallResult = LuauBytecodeType(hostHooks.vectorNamecallBytecodeType(field, str->len));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
@ -1282,42 +1250,35 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
|
||||||
regTags[ra] = bcType.result;
|
regTags[ra] = bcType.result;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (FFlag::LuauCodegenTypeInfo)
|
refineRegType(bcTypeInfo, ra, i, bcType.result);
|
||||||
refineRegType(bcTypeInfo, ra, i, bcType.result);
|
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case LOP_GETUPVAL:
|
case LOP_GETUPVAL:
|
||||||
{
|
{
|
||||||
if (FFlag::LuauCodegenTypeInfo)
|
int ra = LUAU_INSN_A(*pc);
|
||||||
|
int up = LUAU_INSN_B(*pc);
|
||||||
|
|
||||||
|
bcType.a = LBC_TYPE_ANY;
|
||||||
|
|
||||||
|
if (size_t(up) < bcTypeInfo.upvalueTypes.size())
|
||||||
{
|
{
|
||||||
int ra = LUAU_INSN_A(*pc);
|
uint8_t et = bcTypeInfo.upvalueTypes[up];
|
||||||
int up = LUAU_INSN_B(*pc);
|
|
||||||
|
|
||||||
bcType.a = LBC_TYPE_ANY;
|
// TODO: if argument is optional, this might force a VM exit unnecessarily
|
||||||
|
bcType.a = et & ~LBC_TYPE_OPTIONAL_BIT;
|
||||||
if (size_t(up) < bcTypeInfo.upvalueTypes.size())
|
|
||||||
{
|
|
||||||
uint8_t et = bcTypeInfo.upvalueTypes[up];
|
|
||||||
|
|
||||||
// TODO: if argument is optional, this might force a VM exit unnecessarily
|
|
||||||
bcType.a = et & ~LBC_TYPE_OPTIONAL_BIT;
|
|
||||||
}
|
|
||||||
|
|
||||||
regTags[ra] = bcType.a;
|
|
||||||
bcType.result = regTags[ra];
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
regTags[ra] = bcType.a;
|
||||||
|
bcType.result = regTags[ra];
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case LOP_SETUPVAL:
|
case LOP_SETUPVAL:
|
||||||
{
|
{
|
||||||
if (FFlag::LuauCodegenTypeInfo)
|
int ra = LUAU_INSN_A(*pc);
|
||||||
{
|
int up = LUAU_INSN_B(*pc);
|
||||||
int ra = LUAU_INSN_A(*pc);
|
|
||||||
int up = LUAU_INSN_B(*pc);
|
|
||||||
|
|
||||||
refineUpvalueType(bcTypeInfo, up, regTags[ra]);
|
refineUpvalueType(bcTypeInfo, up, regTags[ra]);
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case LOP_GETGLOBAL:
|
case LOP_GETGLOBAL:
|
||||||
|
|
|
@ -12,7 +12,6 @@
|
||||||
|
|
||||||
#include "lapi.h"
|
#include "lapi.h"
|
||||||
|
|
||||||
LUAU_FASTFLAG(LuauCodegenTypeInfo)
|
|
||||||
LUAU_FASTFLAG(LuauLoadUserdataInfo)
|
LUAU_FASTFLAG(LuauLoadUserdataInfo)
|
||||||
LUAU_FASTFLAG(LuauNativeAttribute)
|
LUAU_FASTFLAG(LuauNativeAttribute)
|
||||||
|
|
||||||
|
@ -87,7 +86,6 @@ static void logFunctionHeader(AssemblyBuilder& build, Proto* proto)
|
||||||
template<typename AssemblyBuilder>
|
template<typename AssemblyBuilder>
|
||||||
static void logFunctionTypes_DEPRECATED(AssemblyBuilder& build, const IrFunction& function)
|
static void logFunctionTypes_DEPRECATED(AssemblyBuilder& build, const IrFunction& function)
|
||||||
{
|
{
|
||||||
CODEGEN_ASSERT(FFlag::LuauCodegenTypeInfo);
|
|
||||||
CODEGEN_ASSERT(!FFlag::LuauLoadUserdataInfo);
|
CODEGEN_ASSERT(!FFlag::LuauLoadUserdataInfo);
|
||||||
|
|
||||||
const BytecodeTypeInfo& typeInfo = function.bcTypeInfo;
|
const BytecodeTypeInfo& typeInfo = function.bcTypeInfo;
|
||||||
|
@ -131,7 +129,6 @@ static void logFunctionTypes_DEPRECATED(AssemblyBuilder& build, const IrFunction
|
||||||
template<typename AssemblyBuilder>
|
template<typename AssemblyBuilder>
|
||||||
static void logFunctionTypes(AssemblyBuilder& build, const IrFunction& function, const char* const* userdataTypes)
|
static void logFunctionTypes(AssemblyBuilder& build, const IrFunction& function, const char* const* userdataTypes)
|
||||||
{
|
{
|
||||||
CODEGEN_ASSERT(FFlag::LuauCodegenTypeInfo);
|
|
||||||
CODEGEN_ASSERT(FFlag::LuauLoadUserdataInfo);
|
CODEGEN_ASSERT(FFlag::LuauLoadUserdataInfo);
|
||||||
|
|
||||||
const BytecodeTypeInfo& typeInfo = function.bcTypeInfo;
|
const BytecodeTypeInfo& typeInfo = function.bcTypeInfo;
|
||||||
|
@ -240,7 +237,7 @@ static std::string getAssemblyImpl(AssemblyBuilder& build, const TValue* func, A
|
||||||
if (options.includeAssembly || options.includeIr)
|
if (options.includeAssembly || options.includeIr)
|
||||||
logFunctionHeader(build, p);
|
logFunctionHeader(build, p);
|
||||||
|
|
||||||
if (FFlag::LuauCodegenTypeInfo && options.includeIrTypes)
|
if (options.includeIrTypes)
|
||||||
{
|
{
|
||||||
if (FFlag::LuauLoadUserdataInfo)
|
if (FFlag::LuauLoadUserdataInfo)
|
||||||
logFunctionTypes(build, ir.function, options.compilationOptions.userdataTypes);
|
logFunctionTypes(build, ir.function, options.compilationOptions.userdataTypes);
|
||||||
|
|
|
@@ -27,7 +27,6 @@ LUAU_FASTFLAG(DebugCodegenSkipNumbering)
 LUAU_FASTINT(CodegenHeuristicsInstructionLimit)
 LUAU_FASTINT(CodegenHeuristicsBlockLimit)
 LUAU_FASTINT(CodegenHeuristicsBlockInstructionLimit)
-LUAU_FASTFLAG(LuauCodegenRemoveDeadStores5)
 LUAU_FASTFLAG(LuauLoadUserdataInfo)
 LUAU_FASTFLAG(LuauNativeAttribute)
 
@@ -347,8 +346,7 @@ inline bool lowerFunction(IrBuilder& ir, AssemblyBuilder& build, ModuleHelpers&
             }
         }
 
-        if (FFlag::LuauCodegenRemoveDeadStores5)
-            markDeadStoresInBlockChains(ir);
+        markDeadStoresInBlockChains(ir);
     }
 
     std::vector<uint32_t> sortedBlocks = getSortedBlockOrder(ir.function);
@@ -12,8 +12,6 @@
 
 #include "lstate.h"
 
-LUAU_FASTFLAG(LuauCodegenRemoveDeadStores5)
-
 namespace Luau
 {
 namespace CodeGen
@@ -29,17 +27,13 @@ static void emitBuiltinMathFrexp(IrRegAllocX64& regs, AssemblyBuilderX64& build,
     callWrap.call(qword[rNativeContext + offsetof(NativeContext, libm_frexp)]);
 
     build.vmovsd(luauRegValue(ra), xmm0);
-
-    if (FFlag::LuauCodegenRemoveDeadStores5)
-        build.mov(luauRegTag(ra), LUA_TNUMBER);
+    build.mov(luauRegTag(ra), LUA_TNUMBER);
 
     if (nresults > 1)
     {
         build.vcvtsi2sd(xmm0, xmm0, dword[sTemporarySlot + 0]);
         build.vmovsd(luauRegValue(ra + 1), xmm0);
-
-        if (FFlag::LuauCodegenRemoveDeadStores5)
-            build.mov(luauRegTag(ra + 1), LUA_TNUMBER);
+        build.mov(luauRegTag(ra + 1), LUA_TNUMBER);
     }
 }
 
@@ -52,16 +46,12 @@ static void emitBuiltinMathModf(IrRegAllocX64& regs, AssemblyBuilderX64& build,
 
     build.vmovsd(xmm1, qword[sTemporarySlot + 0]);
     build.vmovsd(luauRegValue(ra), xmm1);
-
-    if (FFlag::LuauCodegenRemoveDeadStores5)
-        build.mov(luauRegTag(ra), LUA_TNUMBER);
+    build.mov(luauRegTag(ra), LUA_TNUMBER);
 
     if (nresults > 1)
     {
         build.vmovsd(luauRegValue(ra + 1), xmm0);
-
-        if (FFlag::LuauCodegenRemoveDeadStores5)
-            build.mov(luauRegTag(ra + 1), LUA_TNUMBER);
+        build.mov(luauRegTag(ra + 1), LUA_TNUMBER);
     }
 }
 
@@ -90,23 +80,21 @@ static void emitBuiltinMathSign(IrRegAllocX64& regs, AssemblyBuilderX64& build,
     build.vblendvpd(tmp0.reg, tmp2.reg, build.f64x2(1, 1), tmp0.reg);
 
     build.vmovsd(luauRegValue(ra), tmp0.reg);
-
-    if (FFlag::LuauCodegenRemoveDeadStores5)
-        build.mov(luauRegTag(ra), LUA_TNUMBER);
+    build.mov(luauRegTag(ra), LUA_TNUMBER);
 }
 
-void emitBuiltin(IrRegAllocX64& regs, AssemblyBuilderX64& build, int bfid, int ra, int arg, OperandX64 arg2, int nparams, int nresults)
+void emitBuiltin(IrRegAllocX64& regs, AssemblyBuilderX64& build, int bfid, int ra, int arg, int nresults)
 {
     switch (bfid)
     {
     case LBF_MATH_FREXP:
-        CODEGEN_ASSERT(nparams == 1 && (nresults == 1 || nresults == 2));
+        CODEGEN_ASSERT(nresults == 1 || nresults == 2);
         return emitBuiltinMathFrexp(regs, build, ra, arg, nresults);
     case LBF_MATH_MODF:
-        CODEGEN_ASSERT(nparams == 1 && (nresults == 1 || nresults == 2));
+        CODEGEN_ASSERT(nresults == 1 || nresults == 2);
         return emitBuiltinMathModf(regs, build, ra, arg, nresults);
     case LBF_MATH_SIGN:
-        CODEGEN_ASSERT(nparams == 1 && nresults == 1);
+        CODEGEN_ASSERT(nresults == 1);
         return emitBuiltinMathSign(regs, build, ra, arg);
     default:
         CODEGEN_ASSERT(!"Missing x64 lowering");
@@ -16,7 +16,7 @@ class AssemblyBuilderX64;
 struct OperandX64;
 struct IrRegAllocX64;
 
-void emitBuiltin(IrRegAllocX64& regs, AssemblyBuilderX64& build, int bfid, int ra, int arg, OperandX64 arg2, int nparams, int nresults);
+void emitBuiltin(IrRegAllocX64& regs, AssemblyBuilderX64& build, int bfid, int ra, int arg, int nresults);
 
 } // namespace X64
 } // namespace CodeGen
@@ -13,10 +13,10 @@
 
 #include <string.h>
 
-LUAU_FASTFLAG(LuauLoadTypeInfo) // Because new VM typeinfo load changes the format used by Codegen, same flag is used
 LUAU_FASTFLAG(LuauCodegenAnalyzeHostVectorOps)
 LUAU_FASTFLAG(LuauLoadUserdataInfo)
 LUAU_FASTFLAG(LuauCodegenInstG)
+LUAU_FASTFLAG(LuauCodegenFastcall3)
 
 namespace Luau
 {
@@ -30,96 +30,9 @@ IrBuilder::IrBuilder(const HostIrHooks& hostHooks)
     , constantMap({IrConstKind::Tag, ~0ull})
 {
 }
-static bool hasTypedParameters_DEPRECATED(Proto* proto)
-{
-    CODEGEN_ASSERT(!FFlag::LuauLoadTypeInfo);
-
-    return proto->typeinfo && proto->numparams != 0;
-}
-
-static void buildArgumentTypeChecks_DEPRECATED(IrBuilder& build, Proto* proto)
-{
-    CODEGEN_ASSERT(!FFlag::LuauLoadTypeInfo);
-    CODEGEN_ASSERT(hasTypedParameters_DEPRECATED(proto));
-
-    for (int i = 0; i < proto->numparams; ++i)
-    {
-        uint8_t et = proto->typeinfo[2 + i];
-
-        uint8_t tag = et & ~LBC_TYPE_OPTIONAL_BIT;
-        uint8_t optional = et & LBC_TYPE_OPTIONAL_BIT;
-
-        if (tag == LBC_TYPE_ANY)
-            continue;
-
-        IrOp load = build.inst(IrCmd::LOAD_TAG, build.vmReg(i));
-
-        IrOp nextCheck;
-        if (optional)
-        {
-            nextCheck = build.block(IrBlockKind::Internal);
-            IrOp fallbackCheck = build.block(IrBlockKind::Internal);
-
-            build.inst(IrCmd::JUMP_EQ_TAG, load, build.constTag(LUA_TNIL), nextCheck, fallbackCheck);
-
-            build.beginBlock(fallbackCheck);
-        }
-
-        switch (tag)
-        {
-        case LBC_TYPE_NIL:
-            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TNIL), build.vmExit(kVmExitEntryGuardPc));
-            break;
-        case LBC_TYPE_BOOLEAN:
-            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TBOOLEAN), build.vmExit(kVmExitEntryGuardPc));
-            break;
-        case LBC_TYPE_NUMBER:
-            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TNUMBER), build.vmExit(kVmExitEntryGuardPc));
-            break;
-        case LBC_TYPE_STRING:
-            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TSTRING), build.vmExit(kVmExitEntryGuardPc));
-            break;
-        case LBC_TYPE_TABLE:
-            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TTABLE), build.vmExit(kVmExitEntryGuardPc));
-            break;
-        case LBC_TYPE_FUNCTION:
-            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TFUNCTION), build.vmExit(kVmExitEntryGuardPc));
-            break;
-        case LBC_TYPE_THREAD:
-            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TTHREAD), build.vmExit(kVmExitEntryGuardPc));
-            break;
-        case LBC_TYPE_USERDATA:
-            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TUSERDATA), build.vmExit(kVmExitEntryGuardPc));
-            break;
-        case LBC_TYPE_VECTOR:
-            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TVECTOR), build.vmExit(kVmExitEntryGuardPc));
-            break;
-        case LBC_TYPE_BUFFER:
-            build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TBUFFER), build.vmExit(kVmExitEntryGuardPc));
-            break;
-        }
-
-        if (optional)
-        {
-            build.inst(IrCmd::JUMP, nextCheck);
-            build.beginBlock(nextCheck);
-        }
-    }
-
-    // If the last argument is optional, we can skip creating a new internal block since one will already have been created.
-    if (!(proto->typeinfo[2 + proto->numparams - 1] & LBC_TYPE_OPTIONAL_BIT))
-    {
-        IrOp next = build.block(IrBlockKind::Internal);
-        build.inst(IrCmd::JUMP, next);
-
-        build.beginBlock(next);
-    }
-}
-
 static bool hasTypedParameters(const BytecodeTypeInfo& typeInfo)
 {
-    CODEGEN_ASSERT(FFlag::LuauLoadTypeInfo);
-
     for (auto el : typeInfo.argumentTypes)
     {
         if (el != LBC_TYPE_ANY)
@@ -131,8 +44,6 @@ static bool hasTypedParameters(const BytecodeTypeInfo& typeInfo)
 
 static void buildArgumentTypeChecks(IrBuilder& build)
 {
-    CODEGEN_ASSERT(FFlag::LuauLoadTypeInfo);
-
     const BytecodeTypeInfo& typeInfo = build.function.bcTypeInfo;
     CODEGEN_ASSERT(hasTypedParameters(typeInfo));
 
@@ -228,11 +139,10 @@ void IrBuilder::buildFunctionIr(Proto* proto)
     function.proto = proto;
     function.variadic = proto->is_vararg != 0;
 
-    if (FFlag::LuauLoadTypeInfo)
-        loadBytecodeTypeInfo(function);
+    loadBytecodeTypeInfo(function);
 
     // Reserve entry block
-    bool generateTypeChecks = FFlag::LuauLoadTypeInfo ? hasTypedParameters(function.bcTypeInfo) : hasTypedParameters_DEPRECATED(proto);
+    bool generateTypeChecks = hasTypedParameters(function.bcTypeInfo);
     IrOp entry = generateTypeChecks ? block(IrBlockKind::Internal) : IrOp{};
 
     // Rebuild original control flow blocks
@@ -247,10 +157,7 @@ void IrBuilder::buildFunctionIr(Proto* proto)
     {
         beginBlock(entry);
 
-        if (FFlag::LuauLoadTypeInfo)
-            buildArgumentTypeChecks(*this);
-        else
-            buildArgumentTypeChecks_DEPRECATED(*this, proto);
+        buildArgumentTypeChecks(*this);
 
         inst(IrCmd::JUMP, blockAtInst(0));
     }
@@ -544,16 +451,21 @@ void IrBuilder::translateInst(LuauOpcode op, const Instruction* pc, int i)
         translateInstCloseUpvals(*this, pc);
         break;
     case LOP_FASTCALL:
-        handleFastcallFallback(translateFastCallN(*this, pc, i, false, 0, {}), pc, i);
+        handleFastcallFallback(translateFastCallN(*this, pc, i, false, 0, {}, {}), pc, i);
         break;
     case LOP_FASTCALL1:
-        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 1, undef()), pc, i);
+        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 1, undef(), undef()), pc, i);
         break;
     case LOP_FASTCALL2:
-        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 2, vmReg(pc[1])), pc, i);
+        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 2, vmReg(pc[1]), undef()), pc, i);
        break;
     case LOP_FASTCALL2K:
-        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 2, vmConst(pc[1])), pc, i);
+        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 2, vmConst(pc[1]), undef()), pc, i);
+        break;
+    case LOP_FASTCALL3:
+        CODEGEN_ASSERT(FFlag::LuauCodegenFastcall3);
+
+        handleFastcallFallback(translateFastCallN(*this, pc, i, true, 3, vmReg(pc[1] & 0xff), vmReg((pc[1] >> 8) & 0xff)), pc, i);
        break;
     case LOP_FORNPREP:
         translateInstForNPrep(*this, pc, i);
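buildFunctionIr now keys argument type-check generation off the loaded BytecodeTypeInfo rather than the raw Proto typeinfo. A minimal sketch of that predicate follows, with a simplified type-info struct standing in for the real one (which carries more fields):

```cpp
#include <cstdint>
#include <vector>

constexpr uint8_t kAnyType = 0; // stand-in for LBC_TYPE_ANY

struct TypeInfoSketch
{
    std::vector<uint8_t> argumentTypes;
};

// Generate entry type checks only when at least one argument has a known type.
static bool hasTypedParametersSketch(const TypeInfoSketch& typeInfo)
{
    for (uint8_t el : typeInfo.argumentTypes)
    {
        if (el != kAnyType)
            return true;
    }

    return false;
}

int main()
{
    TypeInfoSketch info{{kAnyType, 3 /* a hypothetical non-any tag */}};
    return hasTypedParametersSketch(info) ? 0 : 1;
}
```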
@@ -11,11 +11,11 @@
 #include "lstate.h"
 #include "lgc.h"
 
-LUAU_FASTFLAG(LuauCodegenRemoveDeadStores5)
 LUAU_FASTFLAG(LuauCodegenSplitDoarith)
 LUAU_FASTFLAG(LuauCodegenUserdataOps)
 LUAU_FASTFLAGVARIABLE(LuauCodegenUserdataAlloc, false)
 LUAU_FASTFLAGVARIABLE(LuauCodegenUserdataOpsFixA64, false)
+LUAU_FASTFLAG(LuauCodegenFastcall3)
 
 namespace Luau
 {
@@ -197,78 +197,50 @@ static void emitInvokeLibm1P(AssemblyBuilderA64& build, size_t func, int arg)
     build.blr(x1);
 }
 
-static bool emitBuiltin(
-    AssemblyBuilderA64& build, IrFunction& function, IrRegAllocA64& regs, int bfid, int res, int arg, IrOp args, int nparams, int nresults)
+static bool emitBuiltin(AssemblyBuilderA64& build, IrFunction& function, IrRegAllocA64& regs, int bfid, int res, int arg, int nresults)
 {
     switch (bfid)
     {
     case LBF_MATH_FREXP:
     {
-        if (FFlag::LuauCodegenRemoveDeadStores5)
-        {
-            CODEGEN_ASSERT(nparams == 1 && (nresults == 1 || nresults == 2));
+        CODEGEN_ASSERT(nresults == 1 || nresults == 2);
         emitInvokeLibm1P(build, offsetof(NativeContext, libm_frexp), arg);
         build.str(d0, mem(rBase, res * sizeof(TValue) + offsetof(TValue, value.n)));
 
         RegisterA64 temp = regs.allocTemp(KindA64::w);
         build.mov(temp, LUA_TNUMBER);
         build.str(temp, mem(rBase, res * sizeof(TValue) + offsetof(TValue, tt)));
 
         if (nresults == 2)
         {
             build.ldr(w0, sTemporary);
             build.scvtf(d1, w0);
             build.str(d1, mem(rBase, (res + 1) * sizeof(TValue) + offsetof(TValue, value.n)));
             build.str(temp, mem(rBase, (res + 1) * sizeof(TValue) + offsetof(TValue, tt)));
         }
-        }
-        else
-        {
-            CODEGEN_ASSERT(nparams == 1 && (nresults == 1 || nresults == 2));
-            emitInvokeLibm1P(build, offsetof(NativeContext, libm_frexp), arg);
-            build.str(d0, mem(rBase, res * sizeof(TValue) + offsetof(TValue, value.n)));
-            if (nresults == 2)
-            {
-                build.ldr(w0, sTemporary);
-                build.scvtf(d1, w0);
-                build.str(d1, mem(rBase, (res + 1) * sizeof(TValue) + offsetof(TValue, value.n)));
-            }
-        }
         return true;
     }
     case LBF_MATH_MODF:
     {
-        if (FFlag::LuauCodegenRemoveDeadStores5)
-        {
-            CODEGEN_ASSERT(nparams == 1 && (nresults == 1 || nresults == 2));
+        CODEGEN_ASSERT(nresults == 1 || nresults == 2);
         emitInvokeLibm1P(build, offsetof(NativeContext, libm_modf), arg);
         build.ldr(d1, sTemporary);
         build.str(d1, mem(rBase, res * sizeof(TValue) + offsetof(TValue, value.n)));
 
         RegisterA64 temp = regs.allocTemp(KindA64::w);
         build.mov(temp, LUA_TNUMBER);
         build.str(temp, mem(rBase, res * sizeof(TValue) + offsetof(TValue, tt)));
 
         if (nresults == 2)
         {
             build.str(d0, mem(rBase, (res + 1) * sizeof(TValue) + offsetof(TValue, value.n)));
             build.str(temp, mem(rBase, (res + 1) * sizeof(TValue) + offsetof(TValue, tt)));
         }
-        }
-        else
-        {
-            CODEGEN_ASSERT(nparams == 1 && (nresults == 1 || nresults == 2));
-            emitInvokeLibm1P(build, offsetof(NativeContext, libm_modf), arg);
-            build.ldr(d1, sTemporary);
-            build.str(d1, mem(rBase, res * sizeof(TValue) + offsetof(TValue, value.n)));
-            if (nresults == 2)
-                build.str(d0, mem(rBase, (res + 1) * sizeof(TValue) + offsetof(TValue, value.n)));
-        }
         return true;
     }
     case LBF_MATH_SIGN:
     {
-        CODEGEN_ASSERT(nparams == 1 && nresults == 1);
+        CODEGEN_ASSERT(nresults == 1);
         build.ldr(d0, mem(rBase, arg * sizeof(TValue) + offsetof(TValue, value.n)));
         build.fcmpz(d0);
         build.fmov(d0, 0.0);
@@ -278,12 +250,10 @@ static bool emitBuiltin(
         build.fcsel(d0, d1, d0, getConditionFP(IrCondition::Less));
         build.str(d0, mem(rBase, res * sizeof(TValue) + offsetof(TValue, value.n)));
 
-        if (FFlag::LuauCodegenRemoveDeadStores5)
-        {
         RegisterA64 temp = regs.allocTemp(KindA64::w);
|
build.str(temp, mem(rBase, res * sizeof(TValue) + offsetof(TValue, tt)));
|
||||||
build.mov(temp, LUA_TNUMBER);
|
|
||||||
build.str(temp, mem(rBase, res * sizeof(TValue) + offsetof(TValue, tt)));
|
|
||||||
}
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1205,34 +1175,88 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||||
}
|
}
|
||||||
case IrCmd::FASTCALL:
|
case IrCmd::FASTCALL:
|
||||||
regs.spill(build, index);
|
regs.spill(build, index);
|
||||||
error |= !emitBuiltin(build, function, regs, uintOp(inst.a), vmRegOp(inst.b), vmRegOp(inst.c), inst.d, intOp(inst.e), intOp(inst.f));
|
|
||||||
|
if (FFlag::LuauCodegenFastcall3)
|
||||||
|
error |= !emitBuiltin(build, function, regs, uintOp(inst.a), vmRegOp(inst.b), vmRegOp(inst.c), intOp(inst.d));
|
||||||
|
else
|
||||||
|
error |= !emitBuiltin(build, function, regs, uintOp(inst.a), vmRegOp(inst.b), vmRegOp(inst.c), intOp(inst.f));
|
||||||
|
|
||||||
break;
|
break;
|
||||||
case IrCmd::INVOKE_FASTCALL:
|
case IrCmd::INVOKE_FASTCALL:
|
||||||
{
|
{
|
||||||
regs.spill(build, index);
|
if (FFlag::LuauCodegenFastcall3)
|
||||||
build.mov(x0, rState);
|
|
||||||
build.add(x1, rBase, uint16_t(vmRegOp(inst.b) * sizeof(TValue)));
|
|
||||||
build.add(x2, rBase, uint16_t(vmRegOp(inst.c) * sizeof(TValue)));
|
|
||||||
build.mov(w3, intOp(inst.f)); // nresults
|
|
||||||
|
|
||||||
if (inst.d.kind == IrOpKind::VmReg)
|
|
||||||
build.add(x4, rBase, uint16_t(vmRegOp(inst.d) * sizeof(TValue)));
|
|
||||||
else if (inst.d.kind == IrOpKind::VmConst)
|
|
||||||
emitAddOffset(build, x4, rConstants, vmConstOp(inst.d) * sizeof(TValue));
|
|
||||||
else
|
|
||||||
CODEGEN_ASSERT(inst.d.kind == IrOpKind::Undef);
|
|
||||||
|
|
||||||
// nparams
|
|
||||||
if (intOp(inst.e) == LUA_MULTRET)
|
|
||||||
{
|
{
|
||||||
// L->top - (ra + 1)
|
// We might need a temporary and we have to preserve it over the spill
|
||||||
build.ldr(x5, mem(rState, offsetof(lua_State, top)));
|
RegisterA64 temp = regs.allocTemp(KindA64::q);
|
||||||
build.sub(x5, x5, rBase);
|
regs.spill(build, index, {temp});
|
||||||
build.sub(x5, x5, uint16_t((vmRegOp(inst.b) + 1) * sizeof(TValue)));
|
|
||||||
build.lsr(x5, x5, kTValueSizeLog2);
|
build.mov(x0, rState);
|
||||||
|
build.add(x1, rBase, uint16_t(vmRegOp(inst.b) * sizeof(TValue)));
|
||||||
|
build.add(x2, rBase, uint16_t(vmRegOp(inst.c) * sizeof(TValue)));
|
||||||
|
build.mov(w3, intOp(inst.g)); // nresults
|
||||||
|
|
||||||
|
// 'E' argument can only be produced by LOP_FASTCALL3 lowering
|
||||||
|
if (inst.e.kind != IrOpKind::Undef)
|
||||||
|
{
|
||||||
|
CODEGEN_ASSERT(intOp(inst.f) == 3);
|
||||||
|
|
||||||
|
build.ldr(x4, mem(rState, offsetof(lua_State, top)));
|
||||||
|
|
||||||
|
build.ldr(temp, mem(rBase, vmRegOp(inst.d) * sizeof(TValue)));
|
||||||
|
build.str(temp, mem(x4, 0));
|
||||||
|
|
||||||
|
build.ldr(temp, mem(rBase, vmRegOp(inst.e) * sizeof(TValue)));
|
||||||
|
build.str(temp, mem(x4, sizeof(TValue)));
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if (inst.d.kind == IrOpKind::VmReg)
|
||||||
|
build.add(x4, rBase, uint16_t(vmRegOp(inst.d) * sizeof(TValue)));
|
||||||
|
else if (inst.d.kind == IrOpKind::VmConst)
|
||||||
|
emitAddOffset(build, x4, rConstants, vmConstOp(inst.d) * sizeof(TValue));
|
||||||
|
else
|
||||||
|
CODEGEN_ASSERT(inst.d.kind == IrOpKind::Undef);
|
||||||
|
}
|
||||||
|
|
||||||
|
// nparams
|
||||||
|
if (intOp(inst.f) == LUA_MULTRET)
|
||||||
|
{
|
||||||
|
// L->top - (ra + 1)
|
||||||
|
build.ldr(x5, mem(rState, offsetof(lua_State, top)));
|
||||||
|
build.sub(x5, x5, rBase);
|
||||||
|
build.sub(x5, x5, uint16_t((vmRegOp(inst.b) + 1) * sizeof(TValue)));
|
||||||
|
build.lsr(x5, x5, kTValueSizeLog2);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
build.mov(w5, intOp(inst.f));
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
build.mov(w5, intOp(inst.e));
|
{
|
||||||
|
regs.spill(build, index);
|
||||||
|
build.mov(x0, rState);
|
||||||
|
build.add(x1, rBase, uint16_t(vmRegOp(inst.b) * sizeof(TValue)));
|
||||||
|
build.add(x2, rBase, uint16_t(vmRegOp(inst.c) * sizeof(TValue)));
|
||||||
|
build.mov(w3, intOp(inst.f)); // nresults
|
||||||
|
|
||||||
|
if (inst.d.kind == IrOpKind::VmReg)
|
||||||
|
build.add(x4, rBase, uint16_t(vmRegOp(inst.d) * sizeof(TValue)));
|
||||||
|
else if (inst.d.kind == IrOpKind::VmConst)
|
||||||
|
emitAddOffset(build, x4, rConstants, vmConstOp(inst.d) * sizeof(TValue));
|
||||||
|
else
|
||||||
|
CODEGEN_ASSERT(inst.d.kind == IrOpKind::Undef);
|
||||||
|
|
||||||
|
// nparams
|
||||||
|
if (intOp(inst.e) == LUA_MULTRET)
|
||||||
|
{
|
||||||
|
// L->top - (ra + 1)
|
||||||
|
build.ldr(x5, mem(rState, offsetof(lua_State, top)));
|
||||||
|
build.sub(x5, x5, rBase);
|
||||||
|
build.sub(x5, x5, uint16_t((vmRegOp(inst.b) + 1) * sizeof(TValue)));
|
||||||
|
build.lsr(x5, x5, kTValueSizeLog2);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
build.mov(w5, intOp(inst.e));
|
||||||
|
}
|
||||||
|
|
||||||
build.ldr(x6, mem(rNativeContext, offsetof(NativeContext, luauF_table) + uintOp(inst.a) * sizeof(luau_FastFunction)));
|
build.ldr(x6, mem(rNativeContext, offsetof(NativeContext, luauF_table) + uintOp(inst.a) * sizeof(luau_FastFunction)));
|
||||||
build.blr(x6);
|
build.blr(x6);
|
||||||
|
@ -1443,35 +1467,14 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||||
Label fresh; // used when guard aborts execution or jumps to a VM exit
|
Label fresh; // used when guard aborts execution or jumps to a VM exit
|
||||||
Label& fail = getTargetLabel(inst.c, fresh);
|
Label& fail = getTargetLabel(inst.c, fresh);
|
||||||
|
|
||||||
if (FFlag::LuauCodegenRemoveDeadStores5)
|
if (tagOp(inst.b) == 0)
|
||||||
{
|
{
|
||||||
if (tagOp(inst.b) == 0)
|
build.cbnz(regOp(inst.a), fail);
|
||||||
{
|
|
||||||
build.cbnz(regOp(inst.a), fail);
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
build.cmp(regOp(inst.a), tagOp(inst.b));
|
|
||||||
build.b(ConditionA64::NotEqual, fail);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
// To support DebugLuauAbortingChecks, CHECK_TAG with VmReg has to be handled
|
build.cmp(regOp(inst.a), tagOp(inst.b));
|
||||||
RegisterA64 tag = inst.a.kind == IrOpKind::VmReg ? regs.allocTemp(KindA64::w) : regOp(inst.a);
|
build.b(ConditionA64::NotEqual, fail);
|
||||||
|
|
||||||
if (inst.a.kind == IrOpKind::VmReg)
|
|
||||||
build.ldr(tag, mem(rBase, vmRegOp(inst.a) * sizeof(TValue) + offsetof(TValue, tt)));
|
|
||||||
|
|
||||||
if (tagOp(inst.b) == 0)
|
|
||||||
{
|
|
||||||
build.cbnz(tag, fail);
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
build.cmp(tag, tagOp(inst.b));
|
|
||||||
build.b(ConditionA64::NotEqual, fail);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
finalizeTargetLabel(inst.c, fresh);
|
finalizeTargetLabel(inst.c, fresh);
|
||||||
|
|
|
@@ -17,6 +17,7 @@

 LUAU_FASTFLAG(LuauCodegenUserdataOps)
 LUAU_FASTFLAG(LuauCodegenUserdataAlloc)
+LUAU_FASTFLAG(LuauCodegenFastcall3)

 namespace Luau
 {
@@ -1008,9 +1009,10 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)

     case IrCmd::FASTCALL:
     {
-        OperandX64 arg2 = inst.d.kind != IrOpKind::Undef ? memRegDoubleOp(inst.d) : OperandX64{0};
-
-        emitBuiltin(regs, build, uintOp(inst.a), vmRegOp(inst.b), vmRegOp(inst.c), arg2, intOp(inst.e), intOp(inst.f));
+        if (FFlag::LuauCodegenFastcall3)
+            emitBuiltin(regs, build, uintOp(inst.a), vmRegOp(inst.b), vmRegOp(inst.c), intOp(inst.d));
+        else
+            emitBuiltin(regs, build, uintOp(inst.a), vmRegOp(inst.b), vmRegOp(inst.c), intOp(inst.f));
        break;
    }
    case IrCmd::INVOKE_FASTCALL:
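
A hedged sketch of the operand reshuffle implied by this hunk (the real operand meanings are owned by the IR definitions, not this note): with the flag on, FASTCALL is emitted as (builtin id, ra, arg, nresults), so the result count moves from operand 'f' to 'd' and the args/nparams operands disappear. Assumes the same headers as the surrounding file; the helper name is hypothetical.

// FASTCALL operand layout as read from this diff:
//   old: a = builtin id, b = ra, c = arg, d = args, e = nparams, f = nresults
//   new: a = builtin id, b = ra, c = arg, d = nresults
int fastcallNresultsSketch(const IrInst& inst, const IrFunction& function)
{
    return function.intOp(FFlag::LuauCodegenFastcall3 ? inst.d : inst.f);
}
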
@ -1018,25 +1020,49 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||||
unsigned bfid = uintOp(inst.a);
|
unsigned bfid = uintOp(inst.a);
|
||||||
|
|
||||||
OperandX64 args = 0;
|
OperandX64 args = 0;
|
||||||
|
ScopedRegX64 argsAlt{regs};
|
||||||
|
|
||||||
if (inst.d.kind == IrOpKind::VmReg)
|
// 'E' argument can only be produced by LOP_FASTCALL3
|
||||||
args = luauRegAddress(vmRegOp(inst.d));
|
if (FFlag::LuauCodegenFastcall3 && inst.e.kind != IrOpKind::Undef)
|
||||||
else if (inst.d.kind == IrOpKind::VmConst)
|
{
|
||||||
args = luauConstantAddress(vmConstOp(inst.d));
|
CODEGEN_ASSERT(intOp(inst.f) == 3);
|
||||||
|
|
||||||
|
ScopedRegX64 tmp{regs, SizeX64::xmmword};
|
||||||
|
argsAlt.alloc(SizeX64::qword);
|
||||||
|
|
||||||
|
build.mov(argsAlt.reg, qword[rState + offsetof(lua_State, top)]);
|
||||||
|
|
||||||
|
build.vmovups(tmp.reg, luauReg(vmRegOp(inst.d)));
|
||||||
|
build.vmovups(xmmword[argsAlt.reg], tmp.reg);
|
||||||
|
|
||||||
|
build.vmovups(tmp.reg, luauReg(vmRegOp(inst.e)));
|
||||||
|
build.vmovups(xmmword[argsAlt.reg + sizeof(TValue)], tmp.reg);
|
||||||
|
}
|
||||||
else
|
else
|
||||||
CODEGEN_ASSERT(inst.d.kind == IrOpKind::Undef);
|
{
|
||||||
|
if (inst.d.kind == IrOpKind::VmReg)
|
||||||
|
args = luauRegAddress(vmRegOp(inst.d));
|
||||||
|
else if (inst.d.kind == IrOpKind::VmConst)
|
||||||
|
args = luauConstantAddress(vmConstOp(inst.d));
|
||||||
|
else
|
||||||
|
CODEGEN_ASSERT(inst.d.kind == IrOpKind::Undef);
|
||||||
|
}
|
||||||
|
|
||||||
int ra = vmRegOp(inst.b);
|
int ra = vmRegOp(inst.b);
|
||||||
int arg = vmRegOp(inst.c);
|
int arg = vmRegOp(inst.c);
|
||||||
int nparams = intOp(inst.e);
|
int nparams = intOp(FFlag::LuauCodegenFastcall3 ? inst.f : inst.e);
|
||||||
int nresults = intOp(inst.f);
|
int nresults = intOp(FFlag::LuauCodegenFastcall3 ? inst.g : inst.f);
|
||||||
|
|
||||||
IrCallWrapperX64 callWrap(regs, build, index);
|
IrCallWrapperX64 callWrap(regs, build, index);
|
||||||
callWrap.addArgument(SizeX64::qword, rState);
|
callWrap.addArgument(SizeX64::qword, rState);
|
||||||
callWrap.addArgument(SizeX64::qword, luauRegAddress(ra));
|
callWrap.addArgument(SizeX64::qword, luauRegAddress(ra));
|
||||||
callWrap.addArgument(SizeX64::qword, luauRegAddress(arg));
|
callWrap.addArgument(SizeX64::qword, luauRegAddress(arg));
|
||||||
callWrap.addArgument(SizeX64::dword, nresults);
|
callWrap.addArgument(SizeX64::dword, nresults);
|
||||||
callWrap.addArgument(SizeX64::qword, args);
|
|
||||||
|
if (FFlag::LuauCodegenFastcall3 && inst.e.kind != IrOpKind::Undef)
|
||||||
|
callWrap.addArgument(SizeX64::qword, argsAlt);
|
||||||
|
else
|
||||||
|
callWrap.addArgument(SizeX64::qword, args);
|
||||||
|
|
||||||
if (nparams == LUA_MULTRET)
|
if (nparams == LUA_MULTRET)
|
||||||
{
|
{
|
||||||
|
|
|
@@ -8,7 +8,7 @@

 #include <math.h>

-LUAU_FASTFLAG(LuauCodegenRemoveDeadStores5)
+LUAU_FASTFLAG(LuauCodegenFastcall3)

 // TODO: when nresults is less than our actual result count, we can skip computing/writing unused results
@@ -46,19 +46,17 @@ static BuiltinImplResult translateBuiltinNumberToNumber(
         return {BuiltinImplType::None, -1};

     builtinCheckDouble(build, build.vmReg(arg), pcpos);
-    build.inst(IrCmd::FASTCALL, build.constUint(bfid), build.vmReg(ra), build.vmReg(arg), args, build.constInt(1), build.constInt(1));

-    if (!FFlag::LuauCodegenRemoveDeadStores5)
-    {
-        if (ra != arg)
-            build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
-    }
+    if (FFlag::LuauCodegenFastcall3)
+        build.inst(IrCmd::FASTCALL, build.constUint(bfid), build.vmReg(ra), build.vmReg(arg), build.constInt(1));
+    else
+        build.inst(IrCmd::FASTCALL, build.constUint(bfid), build.vmReg(ra), build.vmReg(arg), args, build.constInt(1), build.constInt(1));

     return {BuiltinImplType::Full, 1};
 }

 static BuiltinImplResult translateBuiltinNumberToNumberLibm(
-    IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
+    IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, int nresults, int pcpos)
 {
     if (nparams < 1 || nresults > 1)
         return {BuiltinImplType::None, -1};
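
As a usage-level illustration only (it mirrors the builder calls above rather than any code added in this change), a one-argument numeric builtin such as math.sign would be expected to lower to a single four-operand FASTCALL on the new path, with no trailing STORE_TAG. The wrapper name is hypothetical and assumes the same headers as this file.

void emitSignFastcallSketch(IrBuilder& build, int ra, int arg)
{
    // builtin id, result register, argument register, result count
    build.inst(IrCmd::FASTCALL, build.constUint(LBF_MATH_SIGN), build.vmReg(ra), build.vmReg(arg), build.constInt(1));
}
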
@@ -109,17 +107,12 @@ static BuiltinImplResult translateBuiltinNumberTo2Number(
         return {BuiltinImplType::None, -1};

     builtinCheckDouble(build, build.vmReg(arg), pcpos);
-    build.inst(
-        IrCmd::FASTCALL, build.constUint(bfid), build.vmReg(ra), build.vmReg(arg), args, build.constInt(1), build.constInt(nresults == 1 ? 1 : 2));

-    if (!FFlag::LuauCodegenRemoveDeadStores5)
-    {
-        if (ra != arg)
-            build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
-
-        if (nresults != 1)
-            build.inst(IrCmd::STORE_TAG, build.vmReg(ra + 1), build.constTag(LUA_TNUMBER));
-    }
+    if (FFlag::LuauCodegenFastcall3)
+        build.inst(IrCmd::FASTCALL, build.constUint(bfid), build.vmReg(ra), build.vmReg(arg), build.constInt(nresults == 1 ? 1 : 2));
+    else
+        build.inst(IrCmd::FASTCALL, build.constUint(bfid), build.vmReg(ra), build.vmReg(arg), build.undef(), build.constInt(1),
+            build.constInt(nresults == 1 ? 1 : 2));

     return {BuiltinImplType::Full, 2};
 }
@@ -198,7 +191,8 @@ static BuiltinImplResult translateBuiltinMathLog(IrBuilder& build, int nparams,
     return {BuiltinImplType::Full, 1};
 }

-static BuiltinImplResult translateBuiltinMathMinMax(IrBuilder& build, IrCmd cmd, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
+static BuiltinImplResult translateBuiltinMathMinMax(
+    IrBuilder& build, IrCmd cmd, int nparams, int ra, int arg, IrOp args, IrOp arg3, int nresults, int pcpos)
 {
     if (nparams < 2 || nparams > kMinMaxUnrolledParams || nresults > 1)
         return {BuiltinImplType::None, -1};
|
@@ -206,7 +200,10 @@ static BuiltinImplResult translateBuiltinMathMinMax(IrBuilder& build, IrCmd cmd,
     builtinCheckDouble(build, build.vmReg(arg), pcpos);
     builtinCheckDouble(build, args, pcpos);

-    for (int i = 3; i <= nparams; ++i)
+    if (FFlag::LuauCodegenFastcall3 && nparams >= 3)
+        builtinCheckDouble(build, arg3, pcpos);
+
+    for (int i = (FFlag::LuauCodegenFastcall3 ? 4 : 3); i <= nparams; ++i)
         builtinCheckDouble(build, build.vmReg(vmRegOp(args) + (i - 2)), pcpos);

     IrOp varg1 = builtinLoadDouble(build, build.vmReg(arg));
|
@@ -214,7 +211,13 @@ static BuiltinImplResult translateBuiltinMathMinMax(IrBuilder& build, IrCmd cmd,

     IrOp res = build.inst(cmd, varg2, varg1); // Swapped arguments are required for consistency with VM builtins

-    for (int i = 3; i <= nparams; ++i)
+    if (FFlag::LuauCodegenFastcall3 && nparams >= 3)
+    {
+        IrOp arg = builtinLoadDouble(build, arg3);
+        res = build.inst(cmd, arg, res);
+    }
+
+    for (int i = (FFlag::LuauCodegenFastcall3 ? 4 : 3); i <= nparams; ++i)
     {
         IrOp arg = builtinLoadDouble(build, build.vmReg(vmRegOp(args) + (i - 2)));
         res = build.inst(cmd, arg, res);
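
A scalar model of the fold order used above (plain C++, not Luau IR, and only meant to show the association order; the exact comparison and NaN behaviour is owned by the MIN_NUM/MAX_NUM instructions): the first two arguments combine with their operands swapped, then every following argument, including the new dedicated third argument, folds in on the left.

double foldMinSketch(const double* args, int nparams)
{
    double acc = args[1] < args[0] ? args[1] : args[0]; // swapped first pair, as above
    for (int i = 2; i < nparams; ++i)
        acc = args[i] < acc ? args[i] : acc;            // each extra argument folds into the accumulator
    return acc;
}
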
@@ -228,7 +231,8 @@ static BuiltinImplResult translateBuiltinMathMinMax(IrBuilder& build, IrCmd cmd,
     return {BuiltinImplType::Full, 1};
 }

-static BuiltinImplResult translateBuiltinMathClamp(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback, int pcpos)
+static BuiltinImplResult translateBuiltinMathClamp(
+    IrBuilder& build, int nparams, int ra, int arg, IrOp args, IrOp arg3, int nresults, IrOp fallback, int pcpos)
 {
     if (nparams < 3 || nresults > 1)
         return {BuiltinImplType::None, -1};
@@ -239,10 +243,10 @@ static BuiltinImplResult translateBuiltinMathClamp(IrBuilder& build, int nparams

     builtinCheckDouble(build, build.vmReg(arg), pcpos);
     builtinCheckDouble(build, args, pcpos);
-    builtinCheckDouble(build, build.vmReg(vmRegOp(args) + 1), pcpos);
+    builtinCheckDouble(build, FFlag::LuauCodegenFastcall3 ? arg3 : build.vmReg(vmRegOp(args) + 1), pcpos);

     IrOp min = builtinLoadDouble(build, args);
-    IrOp max = builtinLoadDouble(build, build.vmReg(vmRegOp(args) + 1));
+    IrOp max = builtinLoadDouble(build, FFlag::LuauCodegenFastcall3 ? arg3 : build.vmReg(vmRegOp(args) + 1));

     build.inst(IrCmd::JUMP_CMP_NUM, min, max, build.cond(IrCondition::NotLessEqual), fallback, block);
     build.beginBlock(block);
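
A scalar illustration of why the guard above exists (sketch only, not the emitted code): JUMP_CMP_NUM sends min > max (or NaN bounds) to the fallback, so the inlined path can assume a well-ordered range and clamp without further checks.

double clampSketch(double v, double lo, double hi)
{
    // precondition established by the guard above: lo <= hi, otherwise the VM fallback runs
    double r = v < lo ? lo : v;
    return r > hi ? hi : r;
}
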
@ -305,7 +309,7 @@ static BuiltinImplResult translateBuiltinTypeof(IrBuilder& build, int nparams, i
|
||||||
}
|
}
|
||||||
|
|
||||||
static BuiltinImplResult translateBuiltinBit32BinaryOp(
|
static BuiltinImplResult translateBuiltinBit32BinaryOp(
|
||||||
IrBuilder& build, IrCmd cmd, bool btest, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
IrBuilder& build, IrCmd cmd, bool btest, int nparams, int ra, int arg, IrOp args, IrOp arg3, int nresults, int pcpos)
|
||||||
{
|
{
|
||||||
if (nparams < 2 || nparams > kBit32BinaryOpUnrolledParams || nresults > 1)
|
if (nparams < 2 || nparams > kBit32BinaryOpUnrolledParams || nresults > 1)
|
||||||
return {BuiltinImplType::None, -1};
|
return {BuiltinImplType::None, -1};
|
||||||
|
@ -313,7 +317,10 @@ static BuiltinImplResult translateBuiltinBit32BinaryOp(
|
||||||
builtinCheckDouble(build, build.vmReg(arg), pcpos);
|
builtinCheckDouble(build, build.vmReg(arg), pcpos);
|
||||||
builtinCheckDouble(build, args, pcpos);
|
builtinCheckDouble(build, args, pcpos);
|
||||||
|
|
||||||
for (int i = 3; i <= nparams; ++i)
|
if (FFlag::LuauCodegenFastcall3 && nparams >= 3)
|
||||||
|
builtinCheckDouble(build, arg3, pcpos);
|
||||||
|
|
||||||
|
for (int i = (FFlag::LuauCodegenFastcall3 ? 4 : 3); i <= nparams; ++i)
|
||||||
builtinCheckDouble(build, build.vmReg(vmRegOp(args) + (i - 2)), pcpos);
|
builtinCheckDouble(build, build.vmReg(vmRegOp(args) + (i - 2)), pcpos);
|
||||||
|
|
||||||
IrOp va = builtinLoadDouble(build, build.vmReg(arg));
|
IrOp va = builtinLoadDouble(build, build.vmReg(arg));
|
||||||
|
@ -324,7 +331,15 @@ static BuiltinImplResult translateBuiltinBit32BinaryOp(
|
||||||
|
|
||||||
IrOp res = build.inst(cmd, vaui, vbui);
|
IrOp res = build.inst(cmd, vaui, vbui);
|
||||||
|
|
||||||
for (int i = 3; i <= nparams; ++i)
|
if (FFlag::LuauCodegenFastcall3 && nparams >= 3)
|
||||||
|
{
|
||||||
|
IrOp vc = builtinLoadDouble(build, arg3);
|
||||||
|
IrOp arg = build.inst(IrCmd::NUM_TO_UINT, vc);
|
||||||
|
|
||||||
|
res = build.inst(cmd, res, arg);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (int i = (FFlag::LuauCodegenFastcall3 ? 4 : 3); i <= nparams; ++i)
|
||||||
{
|
{
|
||||||
IrOp vc = builtinLoadDouble(build, build.vmReg(vmRegOp(args) + (i - 2)));
|
IrOp vc = builtinLoadDouble(build, build.vmReg(vmRegOp(args) + (i - 2)));
|
||||||
IrOp arg = build.inst(IrCmd::NUM_TO_UINT, vc);
|
IrOp arg = build.inst(IrCmd::NUM_TO_UINT, vc);
|
||||||
|
@ -449,7 +464,7 @@ static BuiltinImplResult translateBuiltinBit32Rotate(IrBuilder& build, IrCmd cmd
|
||||||
}
|
}
|
||||||
|
|
||||||
static BuiltinImplResult translateBuiltinBit32Extract(
|
static BuiltinImplResult translateBuiltinBit32Extract(
|
||||||
IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback, int pcpos)
|
IrBuilder& build, int nparams, int ra, int arg, IrOp args, IrOp arg3, int nresults, IrOp fallback, int pcpos)
|
||||||
{
|
{
|
||||||
if (nparams < 2 || nresults > 1)
|
if (nparams < 2 || nresults > 1)
|
||||||
return {BuiltinImplType::None, -1};
|
return {BuiltinImplType::None, -1};
|
||||||
|
@ -497,8 +512,8 @@ static BuiltinImplResult translateBuiltinBit32Extract(
|
||||||
{
|
{
|
||||||
IrOp f = build.inst(IrCmd::NUM_TO_INT, vb);
|
IrOp f = build.inst(IrCmd::NUM_TO_INT, vb);
|
||||||
|
|
||||||
builtinCheckDouble(build, build.vmReg(args.index + 1), pcpos);
|
builtinCheckDouble(build, FFlag::LuauCodegenFastcall3 ? arg3 : build.vmReg(args.index + 1), pcpos);
|
||||||
IrOp vc = builtinLoadDouble(build, build.vmReg(args.index + 1));
|
IrOp vc = builtinLoadDouble(build, FFlag::LuauCodegenFastcall3 ? arg3 : build.vmReg(args.index + 1));
|
||||||
IrOp w = build.inst(IrCmd::NUM_TO_INT, vc);
|
IrOp w = build.inst(IrCmd::NUM_TO_INT, vc);
|
||||||
|
|
||||||
IrOp block1 = build.block(IrBlockKind::Internal);
|
IrOp block1 = build.block(IrBlockKind::Internal);
|
||||||
|
@ -587,18 +602,18 @@ static BuiltinImplResult translateBuiltinBit32Unary(IrBuilder& build, IrCmd cmd,
|
||||||
}
|
}
|
||||||
|
|
||||||
static BuiltinImplResult translateBuiltinBit32Replace(
|
static BuiltinImplResult translateBuiltinBit32Replace(
|
||||||
IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback, int pcpos)
|
IrBuilder& build, int nparams, int ra, int arg, IrOp args, IrOp arg3, int nresults, IrOp fallback, int pcpos)
|
||||||
{
|
{
|
||||||
if (nparams < 3 || nresults > 1)
|
if (nparams < 3 || nresults > 1)
|
||||||
return {BuiltinImplType::None, -1};
|
return {BuiltinImplType::None, -1};
|
||||||
|
|
||||||
builtinCheckDouble(build, build.vmReg(arg), pcpos);
|
builtinCheckDouble(build, build.vmReg(arg), pcpos);
|
||||||
builtinCheckDouble(build, args, pcpos);
|
builtinCheckDouble(build, args, pcpos);
|
||||||
builtinCheckDouble(build, build.vmReg(args.index + 1), pcpos);
|
builtinCheckDouble(build, FFlag::LuauCodegenFastcall3 ? arg3 : build.vmReg(args.index + 1), pcpos);
|
||||||
|
|
||||||
IrOp va = builtinLoadDouble(build, build.vmReg(arg));
|
IrOp va = builtinLoadDouble(build, build.vmReg(arg));
|
||||||
IrOp vb = builtinLoadDouble(build, args);
|
IrOp vb = builtinLoadDouble(build, args);
|
||||||
IrOp vc = builtinLoadDouble(build, build.vmReg(args.index + 1));
|
IrOp vc = builtinLoadDouble(build, FFlag::LuauCodegenFastcall3 ? arg3 : build.vmReg(args.index + 1));
|
||||||
|
|
||||||
IrOp n = build.inst(IrCmd::NUM_TO_UINT, va);
|
IrOp n = build.inst(IrCmd::NUM_TO_UINT, va);
|
||||||
IrOp v = build.inst(IrCmd::NUM_TO_UINT, vb);
|
IrOp v = build.inst(IrCmd::NUM_TO_UINT, vb);
|
||||||
|
@ -623,8 +638,8 @@ static BuiltinImplResult translateBuiltinBit32Replace(
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
builtinCheckDouble(build, build.vmReg(args.index + 2), pcpos);
|
builtinCheckDouble(build, FFlag::LuauCodegenFastcall3 ? build.vmReg(vmRegOp(args) + 2) : build.vmReg(args.index + 2), pcpos);
|
||||||
IrOp vd = builtinLoadDouble(build, build.vmReg(args.index + 2));
|
IrOp vd = builtinLoadDouble(build, FFlag::LuauCodegenFastcall3 ? build.vmReg(vmRegOp(args) + 2) : build.vmReg(args.index + 2));
|
||||||
IrOp w = build.inst(IrCmd::NUM_TO_INT, vd);
|
IrOp w = build.inst(IrCmd::NUM_TO_INT, vd);
|
||||||
|
|
||||||
IrOp block1 = build.block(IrBlockKind::Internal);
|
IrOp block1 = build.block(IrBlockKind::Internal);
|
||||||
|
@ -661,7 +676,7 @@ static BuiltinImplResult translateBuiltinBit32Replace(
|
||||||
return {BuiltinImplType::UsesFallback, 1};
|
return {BuiltinImplType::UsesFallback, 1};
|
||||||
}
|
}
|
||||||
|
|
||||||
static BuiltinImplResult translateBuiltinVector(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
static BuiltinImplResult translateBuiltinVector(IrBuilder& build, int nparams, int ra, int arg, IrOp args, IrOp arg3, int nresults, int pcpos)
|
||||||
{
|
{
|
||||||
if (nparams < 3 || nresults > 1)
|
if (nparams < 3 || nresults > 1)
|
||||||
return {BuiltinImplType::None, -1};
|
return {BuiltinImplType::None, -1};
|
||||||
|
@ -670,11 +685,11 @@ static BuiltinImplResult translateBuiltinVector(IrBuilder& build, int nparams, i
|
||||||
|
|
||||||
builtinCheckDouble(build, build.vmReg(arg), pcpos);
|
builtinCheckDouble(build, build.vmReg(arg), pcpos);
|
||||||
builtinCheckDouble(build, args, pcpos);
|
builtinCheckDouble(build, args, pcpos);
|
||||||
builtinCheckDouble(build, build.vmReg(vmRegOp(args) + 1), pcpos);
|
builtinCheckDouble(build, FFlag::LuauCodegenFastcall3 ? arg3 : build.vmReg(vmRegOp(args) + 1), pcpos);
|
||||||
|
|
||||||
IrOp x = builtinLoadDouble(build, build.vmReg(arg));
|
IrOp x = builtinLoadDouble(build, build.vmReg(arg));
|
||||||
IrOp y = builtinLoadDouble(build, args);
|
IrOp y = builtinLoadDouble(build, args);
|
||||||
IrOp z = builtinLoadDouble(build, build.vmReg(vmRegOp(args) + 1));
|
IrOp z = builtinLoadDouble(build, FFlag::LuauCodegenFastcall3 ? arg3 : build.vmReg(vmRegOp(args) + 1));
|
||||||
|
|
||||||
build.inst(IrCmd::STORE_VECTOR, build.vmReg(ra), x, y, z);
|
build.inst(IrCmd::STORE_VECTOR, build.vmReg(ra), x, y, z);
|
||||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TVECTOR));
|
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TVECTOR));
|
||||||
|
@ -736,13 +751,14 @@ static BuiltinImplResult translateBuiltinStringLen(IrBuilder& build, int nparams
|
||||||
return {BuiltinImplType::Full, 1};
|
return {BuiltinImplType::Full, 1};
|
||||||
}
|
}
|
||||||
|
|
||||||
static void translateBufferArgsAndCheckBounds(IrBuilder& build, int nparams, int arg, IrOp args, int size, int pcpos, IrOp& buf, IrOp& intIndex)
|
static void translateBufferArgsAndCheckBounds(
|
||||||
|
IrBuilder& build, int nparams, int arg, IrOp args, IrOp arg3, int size, int pcpos, IrOp& buf, IrOp& intIndex)
|
||||||
{
|
{
|
||||||
build.loadAndCheckTag(build.vmReg(arg), LUA_TBUFFER, build.vmExit(pcpos));
|
build.loadAndCheckTag(build.vmReg(arg), LUA_TBUFFER, build.vmExit(pcpos));
|
||||||
builtinCheckDouble(build, args, pcpos);
|
builtinCheckDouble(build, args, pcpos);
|
||||||
|
|
||||||
if (nparams == 3)
|
if (nparams == 3)
|
||||||
builtinCheckDouble(build, build.vmReg(vmRegOp(args) + 1), pcpos);
|
builtinCheckDouble(build, FFlag::LuauCodegenFastcall3 ? arg3 : build.vmReg(vmRegOp(args) + 1), pcpos);
|
||||||
|
|
||||||
buf = build.inst(IrCmd::LOAD_POINTER, build.vmReg(arg));
|
buf = build.inst(IrCmd::LOAD_POINTER, build.vmReg(arg));
|
||||||
|
|
||||||
|
@ -753,13 +769,13 @@ static void translateBufferArgsAndCheckBounds(IrBuilder& build, int nparams, int
|
||||||
}
|
}
|
||||||
|
|
||||||
static BuiltinImplResult translateBuiltinBufferRead(
|
static BuiltinImplResult translateBuiltinBufferRead(
|
||||||
IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos, IrCmd readCmd, int size, IrCmd convCmd)
|
IrBuilder& build, int nparams, int ra, int arg, IrOp args, IrOp arg3, int nresults, int pcpos, IrCmd readCmd, int size, IrCmd convCmd)
|
||||||
{
|
{
|
||||||
if (nparams < 2 || nresults > 1)
|
if (nparams < 2 || nresults > 1)
|
||||||
return {BuiltinImplType::None, -1};
|
return {BuiltinImplType::None, -1};
|
||||||
|
|
||||||
IrOp buf, intIndex;
|
IrOp buf, intIndex;
|
||||||
translateBufferArgsAndCheckBounds(build, nparams, arg, args, size, pcpos, buf, intIndex);
|
translateBufferArgsAndCheckBounds(build, nparams, arg, args, arg3, size, pcpos, buf, intIndex);
|
||||||
|
|
||||||
IrOp result = build.inst(readCmd, buf, intIndex);
|
IrOp result = build.inst(readCmd, buf, intIndex);
|
||||||
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), convCmd == IrCmd::NOP ? result : build.inst(convCmd, result));
|
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), convCmd == IrCmd::NOP ? result : build.inst(convCmd, result));
|
||||||
|
@ -769,21 +785,22 @@ static BuiltinImplResult translateBuiltinBufferRead(
|
||||||
}
|
}
|
||||||
|
|
||||||
static BuiltinImplResult translateBuiltinBufferWrite(
|
static BuiltinImplResult translateBuiltinBufferWrite(
|
||||||
IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos, IrCmd writeCmd, int size, IrCmd convCmd)
|
IrBuilder& build, int nparams, int ra, int arg, IrOp args, IrOp arg3, int nresults, int pcpos, IrCmd writeCmd, int size, IrCmd convCmd)
|
||||||
{
|
{
|
||||||
if (nparams < 3 || nresults > 0)
|
if (nparams < 3 || nresults > 0)
|
||||||
return {BuiltinImplType::None, -1};
|
return {BuiltinImplType::None, -1};
|
||||||
|
|
||||||
IrOp buf, intIndex;
|
IrOp buf, intIndex;
|
||||||
translateBufferArgsAndCheckBounds(build, nparams, arg, args, size, pcpos, buf, intIndex);
|
translateBufferArgsAndCheckBounds(build, nparams, arg, args, arg3, size, pcpos, buf, intIndex);
|
||||||
|
|
||||||
IrOp numValue = builtinLoadDouble(build, build.vmReg(vmRegOp(args) + 1));
|
IrOp numValue = builtinLoadDouble(build, FFlag::LuauCodegenFastcall3 ? arg3 : build.vmReg(vmRegOp(args) + 1));
|
||||||
build.inst(writeCmd, buf, intIndex, convCmd == IrCmd::NOP ? numValue : build.inst(convCmd, numValue));
|
build.inst(writeCmd, buf, intIndex, convCmd == IrCmd::NOP ? numValue : build.inst(convCmd, numValue));
|
||||||
|
|
||||||
return {BuiltinImplType::Full, 0};
|
return {BuiltinImplType::Full, 0};
|
||||||
}
|
}
|
||||||
|
|
||||||
BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg, IrOp args, int nparams, int nresults, IrOp fallback, int pcpos)
|
BuiltinImplResult translateBuiltin(
|
||||||
|
IrBuilder& build, int bfid, int ra, int arg, IrOp args, IrOp arg3, int nparams, int nresults, IrOp fallback, int pcpos)
|
||||||
{
|
{
|
||||||
// Builtins are not allowed to handle variadic arguments
|
// Builtins are not allowed to handle variadic arguments
|
||||||
if (nparams == LUA_MULTRET)
|
if (nparams == LUA_MULTRET)
|
||||||
|
@ -800,11 +817,11 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
|
||||||
case LBF_MATH_LOG:
|
case LBF_MATH_LOG:
|
||||||
return translateBuiltinMathLog(build, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinMathLog(build, nparams, ra, arg, args, nresults, pcpos);
|
||||||
case LBF_MATH_MIN:
|
case LBF_MATH_MIN:
|
||||||
return translateBuiltinMathMinMax(build, IrCmd::MIN_NUM, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinMathMinMax(build, IrCmd::MIN_NUM, nparams, ra, arg, args, arg3, nresults, pcpos);
|
||||||
case LBF_MATH_MAX:
|
case LBF_MATH_MAX:
|
||||||
return translateBuiltinMathMinMax(build, IrCmd::MAX_NUM, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinMathMinMax(build, IrCmd::MAX_NUM, nparams, ra, arg, args, arg3, nresults, pcpos);
|
||||||
case LBF_MATH_CLAMP:
|
case LBF_MATH_CLAMP:
|
||||||
return translateBuiltinMathClamp(build, nparams, ra, arg, args, nresults, fallback, pcpos);
|
return translateBuiltinMathClamp(build, nparams, ra, arg, args, arg3, nresults, fallback, pcpos);
|
||||||
case LBF_MATH_FLOOR:
|
case LBF_MATH_FLOOR:
|
||||||
return translateBuiltinMathUnary(build, IrCmd::FLOOR_NUM, nparams, ra, arg, nresults, pcpos);
|
return translateBuiltinMathUnary(build, IrCmd::FLOOR_NUM, nparams, ra, arg, nresults, pcpos);
|
||||||
case LBF_MATH_CEIL:
|
case LBF_MATH_CEIL:
|
||||||
|
@ -826,7 +843,7 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
|
||||||
case LBF_MATH_TAN:
|
case LBF_MATH_TAN:
|
||||||
case LBF_MATH_TANH:
|
case LBF_MATH_TANH:
|
||||||
case LBF_MATH_LOG10:
|
case LBF_MATH_LOG10:
|
||||||
return translateBuiltinNumberToNumberLibm(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinNumberToNumberLibm(build, LuauBuiltinFunction(bfid), nparams, ra, arg, nresults, pcpos);
|
||||||
case LBF_MATH_SIGN:
|
case LBF_MATH_SIGN:
|
||||||
return translateBuiltinNumberToNumber(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinNumberToNumber(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
||||||
case LBF_MATH_POW:
|
case LBF_MATH_POW:
|
||||||
|
@ -838,13 +855,13 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
|
||||||
case LBF_MATH_MODF:
|
case LBF_MATH_MODF:
|
||||||
return translateBuiltinNumberTo2Number(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinNumberTo2Number(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
||||||
case LBF_BIT32_BAND:
|
case LBF_BIT32_BAND:
|
||||||
return translateBuiltinBit32BinaryOp(build, IrCmd::BITAND_UINT, /* btest= */ false, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinBit32BinaryOp(build, IrCmd::BITAND_UINT, /* btest= */ false, nparams, ra, arg, args, arg3, nresults, pcpos);
|
||||||
case LBF_BIT32_BOR:
|
case LBF_BIT32_BOR:
|
||||||
return translateBuiltinBit32BinaryOp(build, IrCmd::BITOR_UINT, /* btest= */ false, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinBit32BinaryOp(build, IrCmd::BITOR_UINT, /* btest= */ false, nparams, ra, arg, args, arg3, nresults, pcpos);
|
||||||
case LBF_BIT32_BXOR:
|
case LBF_BIT32_BXOR:
|
||||||
return translateBuiltinBit32BinaryOp(build, IrCmd::BITXOR_UINT, /* btest= */ false, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinBit32BinaryOp(build, IrCmd::BITXOR_UINT, /* btest= */ false, nparams, ra, arg, args, arg3, nresults, pcpos);
|
||||||
case LBF_BIT32_BTEST:
|
case LBF_BIT32_BTEST:
|
||||||
return translateBuiltinBit32BinaryOp(build, IrCmd::BITAND_UINT, /* btest= */ true, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinBit32BinaryOp(build, IrCmd::BITAND_UINT, /* btest= */ true, nparams, ra, arg, args, arg3, nresults, pcpos);
|
||||||
case LBF_BIT32_BNOT:
|
case LBF_BIT32_BNOT:
|
||||||
return translateBuiltinBit32Bnot(build, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinBit32Bnot(build, nparams, ra, arg, args, nresults, pcpos);
|
||||||
case LBF_BIT32_LSHIFT:
|
case LBF_BIT32_LSHIFT:
|
||||||
|
@ -858,7 +875,7 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
|
||||||
case LBF_BIT32_RROTATE:
|
case LBF_BIT32_RROTATE:
|
||||||
return translateBuiltinBit32Rotate(build, IrCmd::BITRROTATE_UINT, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinBit32Rotate(build, IrCmd::BITRROTATE_UINT, nparams, ra, arg, args, nresults, pcpos);
|
||||||
case LBF_BIT32_EXTRACT:
|
case LBF_BIT32_EXTRACT:
|
||||||
return translateBuiltinBit32Extract(build, nparams, ra, arg, args, nresults, fallback, pcpos);
|
return translateBuiltinBit32Extract(build, nparams, ra, arg, args, arg3, nresults, fallback, pcpos);
|
||||||
case LBF_BIT32_EXTRACTK:
|
case LBF_BIT32_EXTRACTK:
|
||||||
return translateBuiltinBit32ExtractK(build, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinBit32ExtractK(build, nparams, ra, arg, args, nresults, pcpos);
|
||||||
case LBF_BIT32_COUNTLZ:
|
case LBF_BIT32_COUNTLZ:
|
||||||
|
@ -866,13 +883,13 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
|
||||||
case LBF_BIT32_COUNTRZ:
|
case LBF_BIT32_COUNTRZ:
|
||||||
return translateBuiltinBit32Unary(build, IrCmd::BITCOUNTRZ_UINT, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinBit32Unary(build, IrCmd::BITCOUNTRZ_UINT, nparams, ra, arg, args, nresults, pcpos);
|
||||||
case LBF_BIT32_REPLACE:
|
case LBF_BIT32_REPLACE:
|
||||||
return translateBuiltinBit32Replace(build, nparams, ra, arg, args, nresults, fallback, pcpos);
|
return translateBuiltinBit32Replace(build, nparams, ra, arg, args, arg3, nresults, fallback, pcpos);
|
||||||
case LBF_TYPE:
|
case LBF_TYPE:
|
||||||
return translateBuiltinType(build, nparams, ra, arg, args, nresults);
|
return translateBuiltinType(build, nparams, ra, arg, args, nresults);
|
||||||
case LBF_TYPEOF:
|
case LBF_TYPEOF:
|
||||||
return translateBuiltinTypeof(build, nparams, ra, arg, args, nresults);
|
return translateBuiltinTypeof(build, nparams, ra, arg, args, nresults);
|
||||||
case LBF_VECTOR:
|
case LBF_VECTOR:
|
||||||
return translateBuiltinVector(build, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinVector(build, nparams, ra, arg, args, arg3, nresults, pcpos);
|
||||||
case LBF_TABLE_INSERT:
|
case LBF_TABLE_INSERT:
|
||||||
return translateBuiltinTableInsert(build, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinTableInsert(build, nparams, ra, arg, args, nresults, pcpos);
|
||||||
case LBF_STRING_LEN:
|
case LBF_STRING_LEN:
|
||||||
|
@ -880,31 +897,31 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
|
||||||
case LBF_BIT32_BYTESWAP:
|
case LBF_BIT32_BYTESWAP:
|
||||||
return translateBuiltinBit32Unary(build, IrCmd::BYTESWAP_UINT, nparams, ra, arg, args, nresults, pcpos);
|
return translateBuiltinBit32Unary(build, IrCmd::BYTESWAP_UINT, nparams, ra, arg, args, nresults, pcpos);
|
||||||
case LBF_BUFFER_READI8:
|
case LBF_BUFFER_READI8:
|
||||||
return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READI8, 1, IrCmd::INT_TO_NUM);
|
return translateBuiltinBufferRead(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_READI8, 1, IrCmd::INT_TO_NUM);
|
||||||
case LBF_BUFFER_READU8:
|
case LBF_BUFFER_READU8:
|
||||||
return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READU8, 1, IrCmd::INT_TO_NUM);
|
return translateBuiltinBufferRead(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_READU8, 1, IrCmd::INT_TO_NUM);
|
||||||
case LBF_BUFFER_WRITEU8:
|
case LBF_BUFFER_WRITEU8:
|
||||||
return translateBuiltinBufferWrite(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_WRITEI8, 1, IrCmd::NUM_TO_UINT);
|
return translateBuiltinBufferWrite(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_WRITEI8, 1, IrCmd::NUM_TO_UINT);
|
||||||
case LBF_BUFFER_READI16:
|
case LBF_BUFFER_READI16:
|
||||||
return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READI16, 2, IrCmd::INT_TO_NUM);
|
return translateBuiltinBufferRead(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_READI16, 2, IrCmd::INT_TO_NUM);
|
||||||
case LBF_BUFFER_READU16:
|
case LBF_BUFFER_READU16:
|
||||||
return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READU16, 2, IrCmd::INT_TO_NUM);
|
return translateBuiltinBufferRead(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_READU16, 2, IrCmd::INT_TO_NUM);
|
||||||
case LBF_BUFFER_WRITEU16:
|
case LBF_BUFFER_WRITEU16:
|
||||||
return translateBuiltinBufferWrite(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_WRITEI16, 2, IrCmd::NUM_TO_UINT);
|
return translateBuiltinBufferWrite(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_WRITEI16, 2, IrCmd::NUM_TO_UINT);
|
||||||
case LBF_BUFFER_READI32:
|
case LBF_BUFFER_READI32:
|
||||||
return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READI32, 4, IrCmd::INT_TO_NUM);
|
return translateBuiltinBufferRead(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_READI32, 4, IrCmd::INT_TO_NUM);
|
||||||
case LBF_BUFFER_READU32:
|
case LBF_BUFFER_READU32:
|
||||||
return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READI32, 4, IrCmd::UINT_TO_NUM);
|
return translateBuiltinBufferRead(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_READI32, 4, IrCmd::UINT_TO_NUM);
|
||||||
case LBF_BUFFER_WRITEU32:
|
case LBF_BUFFER_WRITEU32:
|
||||||
return translateBuiltinBufferWrite(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_WRITEI32, 4, IrCmd::NUM_TO_UINT);
|
return translateBuiltinBufferWrite(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_WRITEI32, 4, IrCmd::NUM_TO_UINT);
|
||||||
case LBF_BUFFER_READF32:
|
case LBF_BUFFER_READF32:
|
||||||
return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READF32, 4, IrCmd::NOP);
|
return translateBuiltinBufferRead(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_READF32, 4, IrCmd::NOP);
|
||||||
case LBF_BUFFER_WRITEF32:
|
case LBF_BUFFER_WRITEF32:
|
||||||
return translateBuiltinBufferWrite(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_WRITEF32, 4, IrCmd::NOP);
|
return translateBuiltinBufferWrite(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_WRITEF32, 4, IrCmd::NOP);
|
||||||
case LBF_BUFFER_READF64:
|
case LBF_BUFFER_READF64:
|
||||||
return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READF64, 8, IrCmd::NOP);
|
return translateBuiltinBufferRead(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_READF64, 8, IrCmd::NOP);
|
||||||
case LBF_BUFFER_WRITEF64:
|
case LBF_BUFFER_WRITEF64:
|
||||||
return translateBuiltinBufferWrite(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_WRITEF64, 8, IrCmd::NOP);
|
return translateBuiltinBufferWrite(build, nparams, ra, arg, args, arg3, nresults, pcpos, IrCmd::BUFFER_WRITEF64, 8, IrCmd::NOP);
|
||||||
default:
|
default:
|
||||||
return {BuiltinImplType::None, -1};
|
return {BuiltinImplType::None, -1};
|
||||||
}
|
}
|
||||||
|
|
|
@@ -22,7 +22,8 @@ struct BuiltinImplResult
     int actualResultCount;
 };

-BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg, IrOp args, int nparams, int nresults, IrOp fallback, int pcpos);
+BuiltinImplResult translateBuiltin(
+    IrBuilder& build, int bfid, int ra, int arg, IrOp args, IrOp arg3, int nparams, int nresults, IrOp fallback, int pcpos);

 } // namespace CodeGen
 } // namespace Luau
@@ -13,9 +13,9 @@
 #include "lstate.h"
 #include "ltm.h"

-LUAU_FASTFLAGVARIABLE(LuauCodegenDirectUserdataFlow, false)
 LUAU_FASTFLAG(LuauCodegenAnalyzeHostVectorOps)
 LUAU_FASTFLAG(LuauCodegenUserdataOps)
+LUAU_FASTFLAG(LuauCodegenFastcall3)

 namespace Luau
 {
@@ -743,7 +743,7 @@ void translateInstCloseUpvals(IrBuilder& build, const Instruction* pc)
     build.inst(IrCmd::CLOSE_UPVALS, build.vmReg(ra));
 }

-IrOp translateFastCallN(IrBuilder& build, const Instruction* pc, int pcpos, bool customParams, int customParamCount, IrOp customArgs)
+IrOp translateFastCallN(IrBuilder& build, const Instruction* pc, int pcpos, bool customParams, int customParamCount, IrOp customArgs, IrOp customArg3)
 {
     LuauOpcode opcode = LuauOpcode(LUAU_INSN_OP(*pc));
     int bfid = LUAU_INSN_A(*pc);
@@ -769,13 +769,15 @@ IrOp translateFastCallN(IrBuilder& build, const Instruction* pc, int pcpos, bool
         builtinArgs = build.constDouble(protok.value.n);
     }

+    IrOp builtinArg3 = FFlag::LuauCodegenFastcall3 ? (customParams ? customArg3 : build.vmReg(ra + 3)) : IrOp{};
+
     IrOp fallback = build.block(IrBlockKind::Fallback);

     // In unsafe environment, instead of retrying fastcall at 'pcpos' we side-exit directly to fallback sequence
     build.inst(IrCmd::CHECK_SAFE_ENV, build.vmExit(pcpos + getOpLength(opcode)));

-    BuiltinImplResult br =
-        translateBuiltin(build, LuauBuiltinFunction(bfid), ra, arg, builtinArgs, nparams, nresults, fallback, pcpos + getOpLength(opcode));
+    BuiltinImplResult br = translateBuiltin(
+        build, LuauBuiltinFunction(bfid), ra, arg, builtinArgs, builtinArg3, nparams, nresults, fallback, pcpos + getOpLength(opcode));

     if (br.type != BuiltinImplType::None)
     {
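
A small sketch of the argument-selection rule introduced above (hypothetical helper name, assumes this file's headers): opcodes with custom parameters (FASTCALL1/2/2K/3) supply their own third operand, possibly undef, while the generic LOP_FASTCALL form uses consecutive registers, so the third argument is taken from ra + 3.

IrOp selectBuiltinArg3(IrBuilder& build, bool customParams, IrOp customArg3, int ra)
{
    if (!FFlag::LuauCodegenFastcall3)
        return IrOp{}; // unused on the old path

    return customParams ? customArg3 : build.vmReg(ra + 3);
}
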
@@ -792,6 +794,22 @@ IrOp translateFastCallN(IrBuilder& build, const Instruction* pc, int pcpos, bool
             return build.undef();
         }
     }
+    else if (FFlag::LuauCodegenFastcall3)
+    {
+        IrOp arg3 = customParams ? customArg3 : build.undef();
+
+        // TODO: we can skip saving pc for some well-behaved builtins which we didn't inline
+        build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + getOpLength(opcode)));
+
+        IrOp res = build.inst(IrCmd::INVOKE_FASTCALL, build.constUint(bfid), build.vmReg(ra), build.vmReg(arg), args, arg3, build.constInt(nparams),
+            build.constInt(nresults));
+        build.inst(IrCmd::CHECK_FASTCALL_RES, res, fallback);
+
+        if (nresults == LUA_MULTRET)
+            build.inst(IrCmd::ADJUST_STACK_TO_REG, build.vmReg(ra), res);
+        else if (nparams == LUA_MULTRET)
+            build.inst(IrCmd::ADJUST_STACK_TO_TOP);
+    }
     else
     {
         // TODO: we can skip saving pc for some well-behaved builtins which we didn't inline
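
Conceptual sketch (hypothetical types and names, not the Luau ABI) of the argument marshalling that the non-inlined three-argument path performs in the lowerings earlier in this commit: the second and third arguments are copied to the top of the stack so the callee still receives a single contiguous args pointer plus a parameter count.

struct TValueSketch { double n; int tt; };                        // stand-in for TValue
using FastFunctionSketch = int (*)(TValueSketch* res, TValueSketch* arg1, TValueSketch* args, int nparams, int nresults);

int invokeFastcall3Sketch(FastFunctionSketch f, TValueSketch* base, TValueSketch* top, int ra, int arg1, int arg2, int arg3, int nresults)
{
    // copy the extra arguments to the stack top, mirroring what the A64/X64 lowering does
    top[0] = base[arg2];
    top[1] = base[arg3];
    return f(&base[ra], &base[arg1], top, /* nparams */ 3, nresults);
}
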
@@ -1277,7 +1295,7 @@ void translateInstGetTableKS(IrBuilder& build, const Instruction* pc, int pcpos)
         return;
     }

-    if (FFlag::LuauCodegenDirectUserdataFlow && (FFlag::LuauCodegenUserdataOps ? isUserdataBytecodeType(bcTypes.a) : bcTypes.a == LBC_TYPE_USERDATA))
+    if (FFlag::LuauCodegenUserdataOps ? isUserdataBytecodeType(bcTypes.a) : bcTypes.a == LBC_TYPE_USERDATA)
     {
         build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TUSERDATA), build.vmExit(pcpos));

@@ -1324,7 +1342,7 @@ void translateInstSetTableKS(IrBuilder& build, const Instruction* pc, int pcpos)

     IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));

-    if (FFlag::LuauCodegenDirectUserdataFlow && (FFlag::LuauCodegenUserdataOps ? isUserdataBytecodeType(bcTypes.a) : bcTypes.a == LBC_TYPE_USERDATA))
+    if (FFlag::LuauCodegenUserdataOps ? isUserdataBytecodeType(bcTypes.a) : bcTypes.a == LBC_TYPE_USERDATA)
     {
         build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TUSERDATA), build.vmExit(pcpos));

@@ -1446,7 +1464,7 @@ bool translateInstNamecall(IrBuilder& build, const Instruction* pc, int pcpos)

     BytecodeTypes bcTypes = build.function.getBytecodeTypesAt(pcpos);

-    if (FFlag::LuauCodegenDirectUserdataFlow && bcTypes.a == LBC_TYPE_VECTOR)
+    if (bcTypes.a == LBC_TYPE_VECTOR)
     {
         build.loadAndCheckTag(build.vmReg(rb), LUA_TVECTOR, build.vmExit(pcpos));

@@ -1470,7 +1488,7 @@ bool translateInstNamecall(IrBuilder& build, const Instruction* pc, int pcpos)
         return false;
     }

-    if (FFlag::LuauCodegenDirectUserdataFlow && (FFlag::LuauCodegenUserdataOps ? isUserdataBytecodeType(bcTypes.a) : bcTypes.a == LBC_TYPE_USERDATA))
+    if (FFlag::LuauCodegenUserdataOps ? isUserdataBytecodeType(bcTypes.a) : bcTypes.a == LBC_TYPE_USERDATA)
     {
         build.loadAndCheckTag(build.vmReg(rb), LUA_TUSERDATA, build.vmExit(pcpos));

@@ -1499,8 +1517,7 @@ bool translateInstNamecall(IrBuilder& build, const Instruction* pc, int pcpos)
     IrOp firstFastPathSuccess = build.block(IrBlockKind::Internal);
     IrOp secondFastPath = build.block(IrBlockKind::Internal);

-    build.loadAndCheckTag(
-        build.vmReg(rb), LUA_TTABLE, FFlag::LuauCodegenDirectUserdataFlow && bcTypes.a == LBC_TYPE_TABLE ? build.vmExit(pcpos) : fallback);
+    build.loadAndCheckTag(build.vmReg(rb), LUA_TTABLE, bcTypes.a == LBC_TYPE_TABLE ? build.vmExit(pcpos) : fallback);
     IrOp table = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));

     CODEGEN_ASSERT(build.function.proto);
@@ -44,7 +44,8 @@ void translateInstDupTable(IrBuilder& build, const Instruction* pc, int pcpos);
 void translateInstGetUpval(IrBuilder& build, const Instruction* pc, int pcpos);
 void translateInstSetUpval(IrBuilder& build, const Instruction* pc, int pcpos);
 void translateInstCloseUpvals(IrBuilder& build, const Instruction* pc);
-IrOp translateFastCallN(IrBuilder& build, const Instruction* pc, int pcpos, bool customParams, int customParamCount, IrOp customArgs);
+IrOp translateFastCallN(
+    IrBuilder& build, const Instruction* pc, int pcpos, bool customParams, int customParamCount, IrOp customArgs, IrOp customArg3);
 void translateInstForNPrep(IrBuilder& build, const Instruction* pc, int pcpos);
 void translateInstForNLoop(IrBuilder& build, const Instruction* pc, int pcpos);
 void translateInstForGPrepNext(IrBuilder& build, const Instruction* pc, int pcpos);
@@ -3,6 +3,8 @@

 #include "Luau/IrUtils.h"

+LUAU_FASTFLAG(LuauCodegenFastcall3)
+
 namespace Luau
 {
 namespace CodeGen
@@ -44,11 +46,11 @@ void IrValueLocationTracking::beforeInstLowering(IrInst& inst)
         invalidateRestoreVmRegs(vmRegOp(inst.a), -1);
         break;
     case IrCmd::FASTCALL:
-        invalidateRestoreVmRegs(vmRegOp(inst.b), function.intOp(inst.f));
+        invalidateRestoreVmRegs(vmRegOp(inst.b), function.intOp(FFlag::LuauCodegenFastcall3 ? inst.d : inst.f));
         break;
     case IrCmd::INVOKE_FASTCALL:
         // Multiple return sequences (count == -1) are defined by ADJUST_STACK_TO_REG
-        if (int count = function.intOp(inst.f); count != -1)
+        if (int count = function.intOp(FFlag::LuauCodegenFastcall3 ? inst.g : inst.f); count != -1)
             invalidateRestoreVmRegs(vmRegOp(inst.b), count);
         break;
     case IrCmd::DO_ARITH:
@ -119,7 +121,7 @@ void IrValueLocationTracking::beforeInstLowering(IrInst& inst)
|
||||||
break;
|
break;
|
||||||
|
|
||||||
// These instructions read VmReg only after optimizeMemoryOperandsX64
|
// These instructions read VmReg only after optimizeMemoryOperandsX64
|
||||||
case IrCmd::CHECK_TAG: // TODO: remove with FFlagLuauCodegenRemoveDeadStores5
|
case IrCmd::CHECK_TAG:
|
||||||
case IrCmd::CHECK_TRUTHY:
|
case IrCmd::CHECK_TRUTHY:
|
||||||
case IrCmd::ADD_NUM:
|
case IrCmd::ADD_NUM:
|
||||||
case IrCmd::SUB_NUM:
|
case IrCmd::SUB_NUM:
|
||||||
|
|
|
@@ -18,10 +18,10 @@ LUAU_FASTINTVARIABLE(LuauCodeGenMinLinearBlockPath, 3)
 LUAU_FASTINTVARIABLE(LuauCodeGenReuseSlotLimit, 64)
 LUAU_FASTINTVARIABLE(LuauCodeGenReuseUdataTagLimit, 64)
 LUAU_FASTFLAGVARIABLE(DebugLuauAbortingChecks, false)
-LUAU_FASTFLAG(LuauCodegenRemoveDeadStores5)
 LUAU_FASTFLAGVARIABLE(LuauCodegenFixSplitStoreConstMismatch, false)
 LUAU_FASTFLAG(LuauCodegenUserdataOps)
 LUAU_FASTFLAG(LuauCodegenUserdataAlloc)
+LUAU_FASTFLAG(LuauCodegenFastcall3)
 
 namespace Luau
 {
@@ -621,16 +621,9 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
                 std::tie(activeLoadCmd, activeLoadValue) = state.getPreviousVersionedLoadForTag(value, source);
 
                 if (state.tryGetTag(source) == value)
-                {
-                    if (FFlag::DebugLuauAbortingChecks && !FFlag::LuauCodegenRemoveDeadStores5)
-                        replace(function, block, index, {IrCmd::CHECK_TAG, inst.a, inst.b, build.undef()});
-                    else
-                        kill(function, inst);
-                }
+                    kill(function, inst);
                 else
-                {
                     state.saveTag(source, value);
-                }
             }
             else
             {
@@ -1150,39 +1143,33 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
 
     case IrCmd::FASTCALL:
     {
-        if (FFlag::LuauCodegenRemoveDeadStores5)
-        {
-            LuauBuiltinFunction bfid = LuauBuiltinFunction(function.uintOp(inst.a));
-            int firstReturnReg = vmRegOp(inst.b);
-            int nresults = function.intOp(inst.f);
-
-            // TODO: FASTCALL is more restrictive than INVOKE_FASTCALL; we should either determine the exact semantics, or rework it
-            handleBuiltinEffects(state, bfid, firstReturnReg, nresults);
-
-            switch (bfid)
-            {
-            case LBF_MATH_MODF:
-            case LBF_MATH_FREXP:
-                state.updateTag(IrOp{IrOpKind::VmReg, uint8_t(firstReturnReg)}, LUA_TNUMBER);
-
-                if (nresults > 1)
-                    state.updateTag(IrOp{IrOpKind::VmReg, uint8_t(firstReturnReg + 1)}, LUA_TNUMBER);
-                break;
-            case LBF_MATH_SIGN:
-                state.updateTag(IrOp{IrOpKind::VmReg, uint8_t(firstReturnReg)}, LUA_TNUMBER);
-                break;
-            default:
-                break;
-            }
-        }
-        else
-        {
-            handleBuiltinEffects(state, LuauBuiltinFunction(function.uintOp(inst.a)), vmRegOp(inst.b), function.intOp(inst.f));
-        }
+        LuauBuiltinFunction bfid = LuauBuiltinFunction(function.uintOp(inst.a));
+        int firstReturnReg = vmRegOp(inst.b);
+        int nresults = function.intOp(FFlag::LuauCodegenFastcall3 ? inst.d : inst.f);
+
+        // TODO: FASTCALL is more restrictive than INVOKE_FASTCALL; we should either determine the exact semantics, or rework it
+        handleBuiltinEffects(state, bfid, firstReturnReg, nresults);
+
+        switch (bfid)
+        {
+        case LBF_MATH_MODF:
+        case LBF_MATH_FREXP:
+            state.updateTag(IrOp{IrOpKind::VmReg, uint8_t(firstReturnReg)}, LUA_TNUMBER);
+
+            if (nresults > 1)
+                state.updateTag(IrOp{IrOpKind::VmReg, uint8_t(firstReturnReg + 1)}, LUA_TNUMBER);
+            break;
+        case LBF_MATH_SIGN:
+            state.updateTag(IrOp{IrOpKind::VmReg, uint8_t(firstReturnReg)}, LUA_TNUMBER);
+            break;
+        default:
+            break;
+        }
         break;
     }
     case IrCmd::INVOKE_FASTCALL:
-        handleBuiltinEffects(state, LuauBuiltinFunction(function.uintOp(inst.a)), vmRegOp(inst.b), function.intOp(inst.f));
+        handleBuiltinEffects(
+            state, LuauBuiltinFunction(function.uintOp(inst.a)), vmRegOp(inst.b), function.intOp(FFlag::LuauCodegenFastcall3 ? inst.g : inst.f));
         break;
 
         // These instructions don't have an effect on register/memory state we are tracking
@@ -9,7 +9,6 @@
 
 #include "lobject.h"
 
-LUAU_FASTFLAGVARIABLE(LuauCodegenRemoveDeadStores5, false)
 LUAU_FASTFLAG(LuauCodegenUserdataOps)
 
 // TODO: optimization can be improved by knowing which registers are live in at each VM exit
@@ -46,6 +46,7 @@
 // Version 3: Adds FORGPREP/JUMPXEQK* and enhances AUX encoding for FORGLOOP. Removes FORGLOOP_NEXT/INEXT and JUMPIFEQK/JUMPIFNOTEQK. Currently supported.
 // Version 4: Adds Proto::flags, typeinfo, and floor division opcodes IDIV/IDIVK. Currently supported.
 // Version 5: Adds SUBRK/DIVRK and vector constants. Currently supported.
+// Version 6: Adds FASTCALL3. Currently supported.
 
 // # Bytecode type information history
 // Version 1: (from bytecode version 4) Type information for function signature. Currently supported.
@@ -299,8 +300,13 @@ enum LuauOpcode
     // A: target register (see FORGLOOP for register layout)
     LOP_FORGPREP_INEXT,
 
-    // removed in v3
-    LOP_DEP_FORGLOOP_INEXT,
+    // FASTCALL3: perform a fast call of a built-in function using 3 register arguments
+    // A: builtin function id (see LuauBuiltinFunction)
+    // B: source argument register
+    // C: jump offset to get to following CALL
+    // AUX: source register 2 in least-significant byte
+    // AUX: source register 3 in second least-significant byte
+    LOP_FASTCALL3,
 
     // FORGPREP_NEXT: prepare FORGLOOP with 2 output variables (no AUX encoding), assuming generator is luaB_next, and jump to FORGLOOP
     // A: target register (see FORGLOOP for register layout)
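For readers unfamiliar with the encoding documented in the FASTCALL3 comment above, here is a minimal, illustrative sketch (not part of the patch) of how the two extra source registers could be unpacked from the AUX word; the helper name is hypothetical, and register 1 still comes from the B field of the main instruction word.

#include <cstdint>
#include <utility>

// Hypothetical helper, mirroring the FASTCALL3 comment above: AUX packs source
// register 2 in its least-significant byte and source register 3 in the next byte.
inline std::pair<uint8_t, uint8_t> decodeFastcall3Aux(uint32_t aux)
{
    uint8_t reg2 = uint8_t(aux & 0xff);        // source register 2
    uint8_t reg3 = uint8_t((aux >> 8) & 0xff); // source register 3
    return {reg2, reg3};
}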
@@ -434,7 +440,7 @@ enum LuauBytecodeTag
 {
     // Bytecode version; runtime supports [MIN, MAX], compiler emits TARGET by default but may emit a higher version when flags are enabled
     LBC_VERSION_MIN = 3,
-    LBC_VERSION_MAX = 5,
+    LBC_VERSION_MAX = 6,
     LBC_VERSION_TARGET = 5,
     // Type encoding version
     LBC_TYPE_VERSION_DEPRECATED = 1,
@@ -28,6 +28,7 @@ inline int getOpLength(LuauOpcode op)
     case LOP_LOADKX:
     case LOP_FASTCALL2:
     case LOP_FASTCALL2K:
+    case LOP_FASTCALL3:
     case LOP_JUMPXEQKNIL:
     case LOP_JUMPXEQKB:
     case LOP_JUMPXEQKN:
@@ -120,12 +120,12 @@ public:
         return *this;
     }
 
-    void clear()
+    void clear(size_t thresholdToDestroy = 32)
     {
         if (count == 0)
             return;
 
-        if (capacity > 32)
+        if (capacity > thresholdToDestroy)
         {
             destroy();
         }
@@ -583,9 +583,9 @@ public:
     {
     }
 
-    void clear()
+    void clear(size_t thresholdToDestroy = 32)
     {
-        impl.clear();
+        impl.clear(thresholdToDestroy);
     }
 
     // Note: this reference is invalidated by any insert operation (i.e. operator[])
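As a hedged usage sketch (not part of this change): a caller that reuses a scratch map across passes could raise the destroy threshold so a moderately sized table keeps its backing storage between clears. The key/value types and the 256 threshold below are arbitrary assumptions for illustration.

#include <climits>
#include "Luau/DenseHash.h"

void reuseScratchMap()
{
    // INT_MIN serves as the empty-key sentinel that DenseHashMap requires
    Luau::DenseHashMap<int, int> scratch{INT_MIN};

    for (int pass = 0; pass < 4; ++pass)
    {
        for (int i = 0; i < 100; ++i)
            scratch[i] = i * pass;

        // Keep capacity unless it grew past 256 slots; the default threshold remains 32
        scratch.clear(/* thresholdToDestroy */ 256);
    }
}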
@@ -9,6 +9,7 @@
 
 LUAU_FASTFLAGVARIABLE(LuauCompileTypeInfo, false)
 LUAU_FASTFLAG(LuauCompileUserdataInfo)
+LUAU_FASTFLAG(LuauCompileFastcall3)
 
 namespace Luau
 {
@@ -113,6 +114,7 @@ inline bool isFastCall(LuauOpcode op)
     case LOP_FASTCALL1:
     case LOP_FASTCALL2:
     case LOP_FASTCALL2K:
+    case LOP_FASTCALL3:
         return true;
 
     default:
@@ -1241,6 +1243,9 @@ std::string BytecodeBuilder::getError(const std::string& message)
 uint8_t BytecodeBuilder::getVersion()
 {
     // This function usually returns LBC_VERSION_TARGET but may sometimes return a higher number (within LBC_VERSION_MIN/MAX) under fast flags
+    if (FFlag::LuauCompileFastcall3)
+        return 6;
+
     return LBC_VERSION_TARGET;
 }
 
@@ -1621,6 +1626,16 @@ void BytecodeBuilder::validateInstructions() const
             VCONSTANY(insns[i + 1]);
             break;
 
+        case LOP_FASTCALL3:
+            LUAU_ASSERT(FFlag::LuauCompileFastcall3);
+
+            VREG(LUAU_INSN_B(insn));
+            VJUMP(LUAU_INSN_C(insn));
+            LUAU_ASSERT(LUAU_INSN_OP(insns[i + 1 + LUAU_INSN_C(insn)]) == LOP_CALL);
+            VREG(insns[i + 1] & 0xff);
+            VREG((insns[i + 1] >> 8) & 0xff);
+            break;
+
         case LOP_COVERAGE:
             break;
 
@@ -2235,6 +2250,13 @@ void BytecodeBuilder::dumpInstruction(const uint32_t* code, std::string& result,
         code++;
         break;
 
+    case LOP_FASTCALL3:
+        LUAU_ASSERT(FFlag::LuauCompileFastcall3);
+
+        formatAppend(result, "FASTCALL3 %d R%d R%d R%d L%d\n", LUAU_INSN_A(insn), LUAU_INSN_B(insn), *code & 0xff, (*code >> 8) & 0xff, targetLabel);
+        code++;
+        break;
+
     case LOP_COVERAGE:
         formatAppend(result, "COVERAGE\n");
         break;
@@ -29,6 +29,7 @@ LUAU_FASTINTVARIABLE(LuauCompileInlineDepth, 5)
 LUAU_FASTFLAG(LuauCompileTypeInfo)
 LUAU_FASTFLAGVARIABLE(LuauCompileTempTypeInfo, false)
 LUAU_FASTFLAGVARIABLE(LuauCompileUserdataInfo, false)
+LUAU_FASTFLAGVARIABLE(LuauCompileFastcall3, false)
 
 LUAU_FASTFLAG(LuauNativeAttribute)
 
@@ -473,10 +474,32 @@ struct Compiler
     {
         LUAU_ASSERT(!expr->self);
         LUAU_ASSERT(expr->args.size >= 1);
-        LUAU_ASSERT(expr->args.size <= 2 || (bfid == LBF_BIT32_EXTRACTK && expr->args.size == 3));
+
+        if (FFlag::LuauCompileFastcall3)
+            LUAU_ASSERT(expr->args.size <= 3);
+        else
+            LUAU_ASSERT(expr->args.size <= 2 || (bfid == LBF_BIT32_EXTRACTK && expr->args.size == 3));
+
         LUAU_ASSERT(bfid == LBF_BIT32_EXTRACTK ? bfK >= 0 : bfK < 0);
 
-        LuauOpcode opc = expr->args.size == 1 ? LOP_FASTCALL1 : (bfK >= 0 || isConstant(expr->args.data[1])) ? LOP_FASTCALL2K : LOP_FASTCALL2;
+        LuauOpcode opc = LOP_NOP;
+
+        if (FFlag::LuauCompileFastcall3)
+        {
+            if (expr->args.size == 1)
+                opc = LOP_FASTCALL1;
+            else if (bfK >= 0 || (expr->args.size == 2 && isConstant(expr->args.data[1])))
+                opc = LOP_FASTCALL2K;
+            else if (expr->args.size == 2)
+                opc = LOP_FASTCALL2;
+            else
+                opc = LOP_FASTCALL3;
+        }
+        else
+        {
+            opc = expr->args.size == 1 ? LOP_FASTCALL1
+                : (bfK >= 0 || (expr->args.size == 2 && isConstant(expr->args.data[1]))) ? LOP_FASTCALL2K : LOP_FASTCALL2;
+        }
 
         uint32_t args[3] = {};
@@ -504,8 +527,16 @@ struct Compiler
         size_t fastcallLabel = bytecode.emitLabel();
 
         bytecode.emitABC(opc, uint8_t(bfid), uint8_t(args[0]), 0);
-        if (opc != LOP_FASTCALL1)
+
+        if (FFlag::LuauCompileFastcall3 && opc == LOP_FASTCALL3)
+        {
+            LUAU_ASSERT(bfK < 0);
+            bytecode.emitAux(args[1] | (args[2] << 8));
+        }
+        else if (opc != LOP_FASTCALL1)
+        {
             bytecode.emitAux(bfK >= 0 ? bfK : args[1]);
+        }
 
         // Set up a traditional Lua stack for the subsequent LOP_CALL.
         // Note, as with other instructions that immediately follow FASTCALL, these are normally not executed and are used as a fallback for
@@ -857,11 +888,28 @@ struct Compiler
             }
         }
 
-        // Optimization: for 1/2 argument fast calls use specialized opcodes
-        if (bfid >= 0 && expr->args.size >= 1 && expr->args.size <= 2)
+        unsigned maxFastcallArgs = 2;
+
+        // Fastcall with 3 arguments is only used if it can help save one or more move instructions
+        if (FFlag::LuauCompileFastcall3 && bfid >= 0 && expr->args.size == 3)
+        {
+            for (size_t i = 0; i < expr->args.size; ++i)
+            {
+                if (int reg = getExprLocalReg(expr->args.data[i]); reg >= 0)
+                {
+                    maxFastcallArgs = 3;
+                    break;
+                }
+            }
+        }
+
+        // Optimization: for 1/2/3 argument fast calls use specialized opcodes
+        if (bfid >= 0 && expr->args.size >= 1 && expr->args.size <= (FFlag::LuauCompileFastcall3 ? maxFastcallArgs : 2u))
         {
             if (!isExprMultRet(expr->args.data[expr->args.size - 1]))
+            {
                 return compileExprFastcallN(expr, target, targetCount, targetTop, multRet, regs, bfid);
+            }
             else if (options.optimizationLevel >= 2)
             {
                 // when a builtin is none-safe with matching arity, even if the last expression returns 0 or >1 arguments,
@@ -49,6 +49,7 @@ struct LintWarning
     Code_CommentDirective = 26,
     Code_IntegerParsing = 27,
     Code_ComparisonPrecedence = 28,
+    Code_RedundantNativeAttribute = 29,
 
     Code__Count
 };
@@ -115,6 +116,7 @@ static const char* kWarningNames[] = {
     "CommentDirective",
     "IntegerParsing",
     "ComparisonPrecedence",
+    "RedundantNativeAttribute",
 };
 // clang-format on
 
@@ -12,8 +12,6 @@
 #include <string.h>
 #include <stdio.h>
 
-LUAU_FASTFLAGVARIABLE(LuauPushErrorStackCheck, false)
-
 static const char* getfuncname(Closure* f);
 
 static int currentpc(lua_State* L, CallInfo* ci)
@@ -332,8 +330,7 @@ l_noret luaG_runerrorL(lua_State* L, const char* fmt, ...)
     vsnprintf(result, sizeof(result), fmt, argp);
     va_end(argp);
 
-    if (FFlag::LuauPushErrorStackCheck)
-        lua_rawcheckstack(L, 1);
+    lua_rawcheckstack(L, 1);
 
     pusherror(L, result);
     luaD_throw(L, LUA_ERRRUN);
@@ -341,8 +338,7 @@ l_noret luaG_runerrorL(lua_State* L, const char* fmt, ...)
 
 void luaG_pusherror(lua_State* L, const char* error)
 {
-    if (FFlag::LuauPushErrorStackCheck)
-        lua_rawcheckstack(L, 1);
+    lua_rawcheckstack(L, 1);
 
     pusherror(L, error);
 }
@@ -6,8 +6,6 @@
 #include "lmem.h"
 #include "lgc.h"
 
-LUAU_FASTFLAGVARIABLE(LuauLoadTypeInfo, false)
-
 Proto* luaF_newproto(lua_State* L)
 {
     Proto* f = luaM_newgco(L, Proto, sizeof(Proto), L->activememcat);
@@ -52,9 +50,7 @@ Proto* luaF_newproto(lua_State* L)
     f->linegaplog2 = 0;
     f->linedefined = 0;
     f->bytecodeid = 0;
-
-    if (FFlag::LuauLoadTypeInfo)
-        f->sizetypeinfo = 0;
+    f->sizetypeinfo = 0;
 
     return f;
 }
@@ -178,16 +174,8 @@ void luaF_freeproto(lua_State* L, Proto* f, lua_Page* page)
     if (f->execdata)
         L->global->ecb.destroy(L, f);
 
-    if (FFlag::LuauLoadTypeInfo)
-    {
-        if (f->typeinfo)
-            luaM_freearray(L, f->typeinfo, f->sizetypeinfo, uint8_t, f->memcat);
-    }
-    else
-    {
-        if (f->typeinfo)
-            luaM_freearray(L, f->typeinfo, f->numparams + 2, uint8_t, f->memcat);
-    }
+    if (f->typeinfo)
+        luaM_freearray(L, f->typeinfo, f->sizetypeinfo, uint8_t, f->memcat);
 
     luaM_freegco(L, f, sizeof(Proto), f->memcat, page);
 }
@@ -14,8 +14,6 @@
 
 #include <string.h>
 
-LUAU_FASTFLAG(LuauLoadTypeInfo)
-
 /*
  * Luau uses an incremental non-generational non-moving mark&sweep garbage collector.
 *
@@ -507,16 +505,8 @@ static size_t propagatemark(global_State* g)
         g->gray = p->gclist;
         traverseproto(g, p);
 
-        if (FFlag::LuauLoadTypeInfo)
-        {
-            return sizeof(Proto) + sizeof(Instruction) * p->sizecode + sizeof(Proto*) * p->sizep + sizeof(TValue) * p->sizek + p->sizelineinfo +
-                   sizeof(LocVar) * p->sizelocvars + sizeof(TString*) * p->sizeupvalues + p->sizetypeinfo;
-        }
-        else
-        {
-            return sizeof(Proto) + sizeof(Instruction) * p->sizecode + sizeof(Proto*) * p->sizep + sizeof(TValue) * p->sizek + p->sizelineinfo +
-                   sizeof(LocVar) * p->sizelocvars + sizeof(TString*) * p->sizeupvalues;
-        }
+        return sizeof(Proto) + sizeof(Instruction) * p->sizecode + sizeof(Proto*) * p->sizep + sizeof(TValue) * p->sizek + p->sizelineinfo +
+               sizeof(LocVar) * p->sizelocvars + sizeof(TString*) * p->sizeupvalues + p->sizetypeinfo;
     }
     default:
         LUAU_ASSERT(0);
@@ -124,11 +124,16 @@ static_assert(offsetof(Udata, data) == ABISWITCH(16, 16, 12), "size mismatch for
 static_assert(sizeof(Table) == ABISWITCH(48, 32, 32), "size mismatch for table header");
 static_assert(offsetof(Buffer, data) == ABISWITCH(8, 8, 8), "size mismatch for buffer header");
 
-LUAU_FASTFLAGVARIABLE(LuauExtendedSizeClasses, false)
-
 const size_t kSizeClasses = LUA_SIZECLASSES;
-const size_t kMaxSmallSize_DEPRECATED = 512; // TODO: remove with FFlagLuauExtendedSizeClasses
+
+// Controls the number of entries in SizeClassConfig and define the maximum possible paged allocation size
+// Modifications require updates the SizeClassConfig initialization
 const size_t kMaxSmallSize = 1024;
 
+// Effective limit on object size to use paged allocation
+// Can be modified without additional changes to code, provided it is smaller or equal to kMaxSmallSize
+const size_t kMaxSmallSizeUsed = 1024;
+
 const size_t kLargePageThreshold = 512; // larger pages are used for objects larger than this size to fit more of them into a page
 
 // constant factor to reduce our page sizes by, to increase the chances that pages we allocate will
@@ -187,8 +192,7 @@ struct SizeClassConfig
 const SizeClassConfig kSizeClassConfig;
 
 // size class for a block of size sz; returns -1 for size=0 because empty allocations take no space
-#define sizeclass(sz) \
-    (size_t((sz)-1) < (FFlag::LuauExtendedSizeClasses ? kMaxSmallSize : kMaxSmallSize_DEPRECATED) ? kSizeClassConfig.classForSize[sz] : -1)
+#define sizeclass(sz) (size_t((sz)-1) < kMaxSmallSizeUsed ? kSizeClassConfig.classForSize[sz] : -1)
 
 // metadata for a block is stored in the first pointer of the block
 #define metadata(block) (*(void**)(block))
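For clarity, an equivalent function form of the reworked sizeclass macro (illustrative only; kMaxSmallSizeUsed and kSizeClassConfig are the definitions introduced above, and the function name is hypothetical):

// size 0, and anything larger than the paged-allocation limit, maps to -1,
// meaning the allocation is not served from a size-classed page
inline int sizeClassFor(size_t sz)
{
    return size_t(sz - 1) < kMaxSmallSizeUsed ? kSizeClassConfig.classForSize[sz] : -1;
}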
@@ -275,34 +279,18 @@ static lua_Page* newpage(lua_State* L, lua_Page** pageset, int pageSize, int blo
 // if it is inlined, then the compiler may determine those functions are "too big" to be profitably inlined, which results in reduced performance
 LUAU_NOINLINE static lua_Page* newclasspage(lua_State* L, lua_Page** freepageset, lua_Page** pageset, uint8_t sizeClass, bool storeMetadata)
 {
-    if (FFlag::LuauExtendedSizeClasses)
-    {
-        int sizeOfClass = kSizeClassConfig.sizeOfClass[sizeClass];
-        int pageSize = sizeOfClass > int(kLargePageThreshold) ? kLargePageSize : kSmallPageSize;
-        int blockSize = sizeOfClass + (storeMetadata ? kBlockHeader : 0);
-        int blockCount = (pageSize - offsetof(lua_Page, data)) / blockSize;
-
-        lua_Page* page = newpage(L, pageset, pageSize, blockSize, blockCount);
-
-        // prepend a page to page freelist (which is empty because we only ever allocate a new page when it is!)
-        LUAU_ASSERT(!freepageset[sizeClass]);
-        freepageset[sizeClass] = page;
-
-        return page;
-    }
-    else
-    {
-        int blockSize = kSizeClassConfig.sizeOfClass[sizeClass] + (storeMetadata ? kBlockHeader : 0);
-        int blockCount = (kSmallPageSize - offsetof(lua_Page, data)) / blockSize;
-
-        lua_Page* page = newpage(L, pageset, kSmallPageSize, blockSize, blockCount);
-
-        // prepend a page to page freelist (which is empty because we only ever allocate a new page when it is!)
-        LUAU_ASSERT(!freepageset[sizeClass]);
-        freepageset[sizeClass] = page;
-
-        return page;
-    }
+    int sizeOfClass = kSizeClassConfig.sizeOfClass[sizeClass];
+    int pageSize = sizeOfClass > int(kLargePageThreshold) ? kLargePageSize : kSmallPageSize;
+    int blockSize = sizeOfClass + (storeMetadata ? kBlockHeader : 0);
+    int blockCount = (pageSize - offsetof(lua_Page, data)) / blockSize;
+
+    lua_Page* page = newpage(L, pageset, pageSize, blockSize, blockCount);
+
+    // prepend a page to page freelist (which is empty because we only ever allocate a new page when it is!)
+    LUAU_ASSERT(!freepageset[sizeClass]);
+    freepageset[sizeClass] = page;
+
+    return page;
 }
 
 static void freepage(lua_State* L, lua_Page** pageset, lua_Page* page)
@@ -11,9 +11,6 @@
 #include "ldebug.h"
 #include "lvm.h"
 
-LUAU_DYNAMIC_FASTFLAGVARIABLE(LuauFastTableMaxn, false)
-LUAU_DYNAMIC_FASTFLAGVARIABLE(LuauFasterConcat, false)
-
 static int foreachi(lua_State* L)
 {
     luaL_checktype(L, 1, LUA_TTABLE);
@@ -56,41 +53,24 @@ static int maxn(lua_State* L)
     double max = 0;
     luaL_checktype(L, 1, LUA_TTABLE);
 
-    if (DFFlag::LuauFastTableMaxn)
-    {
-        Table* t = hvalue(L->base);
-
-        for (int i = 0; i < t->sizearray; i++)
-        {
-            if (!ttisnil(&t->array[i]))
-                max = i + 1;
-        }
-
-        for (int i = 0; i < sizenode(t); i++)
-        {
-            LuaNode* n = gnode(t, i);
-
-            if (!ttisnil(gval(n)) && ttisnumber(gkey(n)))
-            {
-                double v = nvalue(gkey(n));
-
-                if (v > max)
-                    max = v;
-            }
-        }
-    }
-    else
-    {
-        lua_pushnil(L); // first key
-        while (lua_next(L, 1))
-        {
-            lua_pop(L, 1); // remove value
-            if (lua_type(L, -1) == LUA_TNUMBER)
-            {
-                double v = lua_tonumber(L, -1);
-                if (v > max)
-                    max = v;
-            }
-        }
-    }
+    Table* t = hvalue(L->base);
+
+    for (int i = 0; i < t->sizearray; i++)
+    {
+        if (!ttisnil(&t->array[i]))
+            max = i + 1;
+    }
+
+    for (int i = 0; i < sizenode(t); i++)
+    {
+        LuaNode* n = gnode(t, i);
+
+        if (!ttisnil(gval(n)) && ttisnumber(gkey(n)))
+        {
+            double v = nvalue(gkey(n));
+
+            if (v > max)
+                max = v;
+        }
+    }
 
@@ -251,7 +231,7 @@ static int tmove(lua_State* L)
 
 static void addfield(lua_State* L, luaL_Strbuf* b, int i, Table* t)
 {
-    if (DFFlag::LuauFasterConcat && t && unsigned(i - 1) < unsigned(t->sizearray) && ttisstring(&t->array[i - 1]))
+    if (t && unsigned(i - 1) < unsigned(t->sizearray) && ttisstring(&t->array[i - 1]))
     {
         TString* ts = tsvalue(&t->array[i - 1]);
         luaL_addlstring(b, getstr(ts), ts->len);
@@ -273,14 +253,14 @@ static int tconcat(lua_State* L)
     int i = luaL_optinteger(L, 3, 1);
     int last = luaL_opt(L, luaL_checkinteger, 4, lua_objlen(L, 1));
 
-    Table* t = DFFlag::LuauFasterConcat ? hvalue(L->base) : NULL;
+    Table* t = hvalue(L->base);
 
     luaL_Strbuf b;
     luaL_buffinit(L, &b);
     for (; i < last; i++)
     {
         addfield(L, &b, i, t);
-        if (!DFFlag::LuauFasterConcat || lsep != 0)
+        if (lsep != 0)
             luaL_addlstring(&b, sep, lsep);
     }
     if (i == last) // add last value (if interval was not empty)
@@ -98,7 +98,7 @@ LUAU_FASTFLAGVARIABLE(LuauVmSplitDoarith, false)
         VM_DISPATCH_OP(LOP_POWK), VM_DISPATCH_OP(LOP_AND), VM_DISPATCH_OP(LOP_OR), VM_DISPATCH_OP(LOP_ANDK), VM_DISPATCH_OP(LOP_ORK), \
         VM_DISPATCH_OP(LOP_CONCAT), VM_DISPATCH_OP(LOP_NOT), VM_DISPATCH_OP(LOP_MINUS), VM_DISPATCH_OP(LOP_LENGTH), VM_DISPATCH_OP(LOP_NEWTABLE), \
         VM_DISPATCH_OP(LOP_DUPTABLE), VM_DISPATCH_OP(LOP_SETLIST), VM_DISPATCH_OP(LOP_FORNPREP), VM_DISPATCH_OP(LOP_FORNLOOP), \
-        VM_DISPATCH_OP(LOP_FORGLOOP), VM_DISPATCH_OP(LOP_FORGPREP_INEXT), VM_DISPATCH_OP(LOP_DEP_FORGLOOP_INEXT), VM_DISPATCH_OP(LOP_FORGPREP_NEXT), \
+        VM_DISPATCH_OP(LOP_FORGLOOP), VM_DISPATCH_OP(LOP_FORGPREP_INEXT), VM_DISPATCH_OP(LOP_FASTCALL3), VM_DISPATCH_OP(LOP_FORGPREP_NEXT), \
         VM_DISPATCH_OP(LOP_NATIVECALL), VM_DISPATCH_OP(LOP_GETVARARGS), VM_DISPATCH_OP(LOP_DUPCLOSURE), VM_DISPATCH_OP(LOP_PREPVARARGS), \
         VM_DISPATCH_OP(LOP_LOADKX), VM_DISPATCH_OP(LOP_JUMPX), VM_DISPATCH_OP(LOP_FASTCALL), VM_DISPATCH_OP(LOP_COVERAGE), \
         VM_DISPATCH_OP(LOP_CAPTURE), VM_DISPATCH_OP(LOP_SUBRK), VM_DISPATCH_OP(LOP_DIVRK), VM_DISPATCH_OP(LOP_FASTCALL1), \
@@ -2539,12 +2539,6 @@ reentry:
             VM_NEXT();
         }
 
-        VM_CASE(LOP_DEP_FORGLOOP_INEXT)
-        {
-            LUAU_ASSERT(!"Unsupported deprecated opcode");
-            LUAU_UNREACHABLE();
-        }
-
         VM_CASE(LOP_FORGPREP_NEXT)
         {
             Instruction insn = *pc++;
@@ -3013,6 +3007,60 @@ reentry:
             }
         }
 
+        VM_CASE(LOP_FASTCALL3)
+        {
+            Instruction insn = *pc++;
+            int bfid = LUAU_INSN_A(insn);
+            int skip = LUAU_INSN_C(insn) - 1;
+            uint32_t aux = *pc++;
+            TValue* arg1 = VM_REG(LUAU_INSN_B(insn));
+            TValue* arg2 = VM_REG(aux & 0xff);
+            TValue* arg3 = VM_REG((aux >> 8) & 0xff);
+
+            LUAU_ASSERT(unsigned(pc - cl->l.p->code + skip) < unsigned(cl->l.p->sizecode));
+
+            Instruction call = pc[skip];
+            LUAU_ASSERT(LUAU_INSN_OP(call) == LOP_CALL);
+
+            StkId ra = VM_REG(LUAU_INSN_A(call));
+
+            int nparams = 3;
+            int nresults = LUAU_INSN_C(call) - 1;
+
+            luau_FastFunction f = luauF_table[bfid];
+            LUAU_ASSERT(f);
+
+            if (cl->env->safeenv)
+            {
+                VM_PROTECT_PC(); // f may fail due to OOM
+
+                setobj2s(L, L->top, arg2);
+                setobj2s(L, L->top + 1, arg3);
+
+                int n = f(L, ra, arg1, nresults, L->top, nparams);
+
+                if (n >= 0)
+                {
+                    if (nresults == LUA_MULTRET)
+                        L->top = ra + n;
+
+                    pc += skip + 1; // skip instructions that compute function as well as CALL
+                    LUAU_ASSERT(unsigned(pc - cl->l.p->code) < unsigned(cl->l.p->sizecode));
+                    VM_NEXT();
+                }
+                else
+                {
+                    // continue execution through the fallback code
+                    VM_NEXT();
+                }
+            }
+            else
+            {
+                // continue execution through the fallback code
+                VM_NEXT();
+            }
+        }
+
         VM_CASE(LOP_BREAK)
         {
             LUAU_ASSERT(cl->l.p->debuginsn);
@@ -13,7 +13,6 @@
 
 #include <string.h>
 
-LUAU_FASTFLAG(LuauLoadTypeInfo)
 LUAU_FASTFLAGVARIABLE(LuauLoadUserdataInfo, false)
 
 // TODO: RAII deallocation doesn't work for longjmp builds if a memory error happens
@@ -357,70 +356,11 @@ int luau_load(lua_State* L, const char* chunkname, const char* data, size_t size
         {
             p->flags = read<uint8_t>(data, size, offset);
 
-            if (FFlag::LuauLoadTypeInfo)
-            {
-                if (typesversion == 1)
-                {
-                    uint32_t typesize = readVarInt(data, size, offset);
-
-                    if (typesize)
-                    {
-                        uint8_t* types = (uint8_t*)data + offset;
-
-                        LUAU_ASSERT(typesize == unsigned(2 + p->numparams));
-                        LUAU_ASSERT(types[0] == LBC_TYPE_FUNCTION);
-                        LUAU_ASSERT(types[1] == p->numparams);
-
-                        // transform v1 into v2 format
-                        int headersize = typesize > 127 ? 4 : 3;
-
-                        p->typeinfo = luaM_newarray(L, headersize + typesize, uint8_t, p->memcat);
-                        p->sizetypeinfo = headersize + typesize;
-
-                        if (headersize == 4)
-                        {
-                            p->typeinfo[0] = (typesize & 127) | (1 << 7);
-                            p->typeinfo[1] = typesize >> 7;
-                            p->typeinfo[2] = 0;
-                            p->typeinfo[3] = 0;
-                        }
-                        else
-                        {
-                            p->typeinfo[0] = uint8_t(typesize);
-                            p->typeinfo[1] = 0;
-                            p->typeinfo[2] = 0;
-                        }
-
-                        memcpy(p->typeinfo + headersize, types, typesize);
-                    }
-
-                    offset += typesize;
-                }
-                else if (typesversion == 2 || (FFlag::LuauLoadUserdataInfo && typesversion == 3))
-                {
-                    uint32_t typesize = readVarInt(data, size, offset);
-
-                    if (typesize)
-                    {
-                        uint8_t* types = (uint8_t*)data + offset;
-
-                        p->typeinfo = luaM_newarray(L, typesize, uint8_t, p->memcat);
-                        p->sizetypeinfo = typesize;
-                        memcpy(p->typeinfo, types, typesize);
-                        offset += typesize;
-
-                        if (FFlag::LuauLoadUserdataInfo && typesversion == 3)
-                        {
-                            remapUserdataTypes((char*)(uint8_t*)p->typeinfo, p->sizetypeinfo, userdataRemapping, userdataTypeLimit);
-                        }
-                    }
-                }
-            }
-            else
+            if (typesversion == 1)
             {
                 uint32_t typesize = readVarInt(data, size, offset);
 
-                if (typesize && typesversion == LBC_TYPE_VERSION_DEPRECATED)
+                if (typesize)
                 {
                     uint8_t* types = (uint8_t*)data + offset;
 
@@ -428,12 +368,50 @@ int luau_load(lua_State* L, const char* chunkname, const char* data, size_t size
                     LUAU_ASSERT(types[0] == LBC_TYPE_FUNCTION);
                     LUAU_ASSERT(types[1] == p->numparams);
 
-                    p->typeinfo = luaM_newarray(L, typesize, uint8_t, p->memcat);
-                    memcpy(p->typeinfo, types, typesize);
+                    // transform v1 into v2 format
+                    int headersize = typesize > 127 ? 4 : 3;
+
+                    p->typeinfo = luaM_newarray(L, headersize + typesize, uint8_t, p->memcat);
+                    p->sizetypeinfo = headersize + typesize;
+
+                    if (headersize == 4)
+                    {
+                        p->typeinfo[0] = (typesize & 127) | (1 << 7);
+                        p->typeinfo[1] = typesize >> 7;
+                        p->typeinfo[2] = 0;
+                        p->typeinfo[3] = 0;
+                    }
+                    else
+                    {
+                        p->typeinfo[0] = uint8_t(typesize);
+                        p->typeinfo[1] = 0;
+                        p->typeinfo[2] = 0;
+                    }
+
+                    memcpy(p->typeinfo + headersize, types, typesize);
                 }
 
                 offset += typesize;
             }
+            else if (typesversion == 2 || (FFlag::LuauLoadUserdataInfo && typesversion == 3))
+            {
+                uint32_t typesize = readVarInt(data, size, offset);
+
+                if (typesize)
+                {
+                    uint8_t* types = (uint8_t*)data + offset;
+
+                    p->typeinfo = luaM_newarray(L, typesize, uint8_t, p->memcat);
+                    p->sizetypeinfo = typesize;
+                    memcpy(p->typeinfo, types, typesize);
+                    offset += typesize;
+
+                    if (FFlag::LuauLoadUserdataInfo && typesversion == 3)
+                    {
+                        remapUserdataTypes((char*)(uint8_t*)p->typeinfo, p->sizetypeinfo, userdataRemapping, userdataTypeLimit);
+                    }
+                }
+            }
         }
 
         const int sizecode = readVarInt(data, size, offset);
@@ -25,6 +25,7 @@ LUAU_FASTINT(LuauRecursionLimit)
 LUAU_FASTFLAG(LuauCompileTypeInfo)
 LUAU_FASTFLAG(LuauCompileTempTypeInfo)
 LUAU_FASTFLAG(LuauCompileUserdataInfo)
+LUAU_FASTFLAG(LuauCompileFastcall3)
 
 using namespace Luau;
 
@@ -3486,6 +3487,33 @@ RETURN R1 -1
 )");
 }
 
+TEST_CASE("Fastcall3")
+{
+    ScopedFastFlag luauCompileFastcall3{FFlag::LuauCompileFastcall3, true};
+
+    CHECK_EQ("\n" + compileFunction0(R"(
+local a, b, c = ...
+return math.min(a, b, c) + math.clamp(a, b, c)
+)"),
+        R"(
+GETVARARGS R0 3
+FASTCALL3 19 R0 R1 R2 L0
+MOVE R5 R0
+MOVE R6 R1
+MOVE R7 R2
+GETIMPORT R4 2 [math.min]
+CALL R4 3 1
+L0: FASTCALL3 46 R0 R1 R2 L1
+MOVE R6 R0
+MOVE R7 R1
+MOVE R8 R2
+GETIMPORT R5 4 [math.clamp]
+CALL R5 3 1
+L1: ADD R3 R4 R5
+RETURN R3 1
+)");
+}
+
 TEST_CASE("FastcallSelect")
 {
     // select(_, ...) compiles to a builtin call
@@ -4668,6 +4696,34 @@ L0: RETURN R0 -1
 )");
 }
 
+TEST_CASE("VectorFastCall3")
+{
+    ScopedFastFlag luauCompileFastcall3{FFlag::LuauCompileFastcall3, true};
+
+    const char* source = R"(
+local a, b, c = ...
+return Vector3.new(a, b, c)
+)";
+
+    Luau::BytecodeBuilder bcb;
+    bcb.setDumpFlags(Luau::BytecodeBuilder::Dump_Code);
+    Luau::CompileOptions options;
+    options.vectorLib = "Vector3";
+    options.vectorCtor = "new";
+    Luau::compileOrThrow(bcb, source, options);
+
+    CHECK_EQ("\n" + bcb.dumpFunction(0), R"(
+GETVARARGS R0 3
+FASTCALL3 54 R0 R1 R2 L0
+MOVE R4 R0
+MOVE R5 R1
+MOVE R6 R2
+GETIMPORT R3 2 [Vector3.new]
+CALL R3 3 -1
+L0: RETURN R3 -1
+)");
+}
+
 TEST_CASE("VectorLiterals")
 {
     CHECK_EQ("\n" + compileFunction("return Vector3.new(1, 2, 3)", 0, 2, /*enableVectors*/ true), R"(
@@ -48,7 +48,8 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "binary_op_type_family_errors")
     LUAU_REQUIRE_ERROR_COUNT(1, result);
 
     if (FFlag::DebugLuauDeferredConstraintResolution)
-        CHECK_EQ("Operator '+' could not be applied to operands of types number and string; there is no corresponding overload for __add", toString(result.errors[0]));
+        CHECK_EQ("Operator '+' could not be applied to operands of types number and string; there is no corresponding overload for __add",
+            toString(result.errors[0]));
     else
         CHECK_EQ("Type 'string' could not be converted into 'number'", toString(result.errors[0]));
 }
@@ -66,7 +67,8 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "unary_op_type_family_errors")
     if (FFlag::DebugLuauDeferredConstraintResolution)
     {
         LUAU_REQUIRE_ERROR_COUNT(2, result);
-        CHECK_EQ("Operator '-' could not be applied to operand of type string; there is no corresponding overload for __unm", toString(result.errors[0]));
+        CHECK_EQ(
+            "Operator '-' could not be applied to operand of type string; there is no corresponding overload for __unm", toString(result.errors[0]));
         CHECK_EQ("Type 'string' could not be converted into 'number'", toString(result.errors[1]));
     }
     else
@@ -241,3 +241,21 @@ using DifferFixtureWithBuiltins = DifferFixtureGeneric<BuiltinsFixture>;
     } while (false)
 
 #define LUAU_REQUIRE_NO_ERRORS(result) LUAU_REQUIRE_ERROR_COUNT(0, result)
+
+#define LUAU_CHECK_ERRORS(result) \
+    do \
+    { \
+        auto&& r = (result); \
+        validateErrors(r.errors); \
+        CHECK(!r.errors.empty()); \
+    } while (false)
+
+#define LUAU_CHECK_ERROR_COUNT(count, result) \
+    do \
+    { \
+        auto&& r = (result); \
+        validateErrors(r.errors); \
+        CHECK_MESSAGE(count == r.errors.size(), getErrors(r)); \
+    } while (false)
+
+#define LUAU_CHECK_NO_ERRORS(result) LUAU_CHECK_ERROR_COUNT(0, result)
@@ -125,11 +125,7 @@ TEST_CASE_FIXTURE(GeneralizationFixture, "cache_fully_generalized_types")
     CHECK(generalizedTypes->empty());
 
     TypeId tinyTable = arena.addType(TableType{
-        TableType::Props{{"one", builtinTypes.numberType}, {"two", builtinTypes.stringType}},
-        std::nullopt,
-        TypeLevel{},
-        TableState::Sealed
-    });
+        TableType::Props{{"one", builtinTypes.numberType}, {"two", builtinTypes.stringType}}, std::nullopt, TypeLevel{}, TableState::Sealed});
 
     generalize(tinyTable);
 
@@ -142,17 +138,10 @@ TEST_CASE_FIXTURE(GeneralizationFixture, "dont_cache_types_that_arent_done_yet")
 {
     TypeId freeTy = arena.addType(FreeType{NotNull{globalScope.get()}, builtinTypes.neverType, builtinTypes.stringType});
 
-    TypeId fnTy = arena.addType(FunctionType{
-        builtinTypes.emptyTypePack,
-        arena.addTypePack(TypePack{{builtinTypes.numberType}})
-    });
+    TypeId fnTy = arena.addType(FunctionType{builtinTypes.emptyTypePack, arena.addTypePack(TypePack{{builtinTypes.numberType}})});
 
     TypeId tableTy = arena.addType(TableType{
-        TableType::Props{{"one", builtinTypes.numberType}, {"two", freeTy}, {"three", fnTy}},
-        std::nullopt,
-        TypeLevel{},
-        TableState::Sealed
-    });
+        TableType::Props{{"one", builtinTypes.numberType}, {"two", freeTy}, {"three", fnTy}}, std::nullopt, TypeLevel{}, TableState::Sealed});
 
     generalize(tableTy);
 
@@ -174,11 +163,7 @@ TEST_CASE_FIXTURE(GeneralizationFixture, "functions_containing_cyclic_tables_can
     });
 
     asMutable(selfTy)->ty.emplace<TableType>(
-        TableType::Props{{"count", builtinTypes.numberType}, {"method", methodTy}},
-        std::nullopt,
-        TypeLevel{},
-        TableState::Sealed
-    );
+        TableType::Props{{"count", builtinTypes.numberType}, {"method", methodTy}}, std::nullopt, TypeLevel{}, TableState::Sealed);
 
     generalize(methodTy);
 
@@ -12,9 +12,10 @@
 
 #include <limits.h>
 
-LUAU_FASTFLAG(LuauCodegenRemoveDeadStores5)
 LUAU_FASTFLAG(DebugLuauAbortingChecks)
 LUAU_FASTFLAG(LuauCodegenFixSplitStoreConstMismatch)
+LUAU_FASTFLAG(LuauCodegenInstG)
+LUAU_FASTFLAG(LuauCodegenFastcall3)
 
 using namespace Luau::CodeGen;
 
@@ -1117,6 +1118,8 @@ bb_0:
 
 TEST_CASE_FIXTURE(IrBuilderFixture, "BuiltinFastcallsMayInvalidateMemory")
 {
+    ScopedFastFlag luauCodegenInstG{FFlag::LuauCodegenInstG, true};
+
     IrOp block = build.block(IrBlockKind::Internal);
     IrOp fallback = build.block(IrBlockKind::Fallback);
 
@@ -1129,8 +1132,8 @@ TEST_CASE_FIXTURE(IrBuilderFixture, "BuiltinFastcallsMayInvalidateMemory")
     build.inst(IrCmd::CHECK_NO_METATABLE, table, fallback);
     build.inst(IrCmd::CHECK_READONLY, table, fallback);
 
-    build.inst(IrCmd::INVOKE_FASTCALL, build.constUint(LBF_SETMETATABLE), build.vmReg(1), build.vmReg(2), build.vmReg(3), build.constInt(3),
-        build.constInt(1));
+    build.inst(IrCmd::INVOKE_FASTCALL, build.constUint(LBF_SETMETATABLE), build.vmReg(1), build.vmReg(2), build.vmReg(3), build.undef(),
+        build.constInt(3), build.constInt(1));
 
     build.inst(IrCmd::CHECK_NO_METATABLE, table, fallback);
     build.inst(IrCmd::CHECK_READONLY, table, fallback);
@@ -1151,7 +1154,7 @@ bb_0:
   %1 = LOAD_POINTER R0
   CHECK_NO_METATABLE %1, bb_fallback_1
   CHECK_READONLY %1, bb_fallback_1
-  %4 = INVOKE_FASTCALL 61u, R1, R2, R3, 3i, 1i
+  %4 = INVOKE_FASTCALL 61u, R1, R2, R3, undef, 3i, 1i
   CHECK_NO_METATABLE %1, bb_fallback_1
   CHECK_READONLY %1, bb_fallback_1
   STORE_DOUBLE R1, 0.5
@@ -2546,8 +2549,6 @@ bb_0: ; useCount: 0
 
 TEST_CASE_FIXTURE(IrBuilderFixture, "ForgprepInvalidation")
 {
-    ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-
     IrOp block = build.block(IrBlockKind::Internal);
     IrOp followup = build.block(IrBlockKind::Internal);
 
@@ -2587,14 +2588,14 @@ bb_1:
 
 TEST_CASE_FIXTURE(IrBuilderFixture, "FastCallEffects1")
 {
-    ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
+    ScopedFastFlag luauCodegenFastcall3{FFlag::LuauCodegenFastcall3, true};
 
     IrOp entry = build.block(IrBlockKind::Internal);
 
     build.beginBlock(entry);
-    build.inst(IrCmd::FASTCALL, build.constUint(LBF_MATH_FREXP), build.vmReg(1), build.vmReg(2), build.undef(), build.constInt(1), build.constInt(2));
-    build.inst(IrCmd::CHECK_TAG, build.vmReg(1), build.constTag(tnumber), build.vmExit(1));
-    build.inst(IrCmd::CHECK_TAG, build.vmReg(2), build.constTag(tnumber), build.vmExit(1));
+    build.inst(IrCmd::FASTCALL, build.constUint(LBF_MATH_FREXP), build.vmReg(1), build.vmReg(2), build.constInt(2));
+    build.inst(IrCmd::CHECK_TAG, build.inst(IrCmd::LOAD_TAG, build.vmReg(1)), build.constTag(tnumber), build.vmExit(1));
+    build.inst(IrCmd::CHECK_TAG, build.inst(IrCmd::LOAD_TAG, build.vmReg(2)), build.constTag(tnumber), build.vmExit(1));
     build.inst(IrCmd::RETURN, build.vmReg(1), build.constInt(2));
 
     updateUseCounts(build.function);
@@ -2604,7 +2605,7 @@ TEST_CASE_FIXTURE(IrBuilderFixture, "FastCallEffects1")
     CHECK("\n" + toString(build.function, IncludeUseInfo::No) == R"(
 bb_0:
 ; in regs: R2
-   FASTCALL 14u, R1, R2, undef, 1i, 2i
+   FASTCALL 14u, R1, R2, 2i
    RETURN R1, 2i
 
 )");
@@ -2612,14 +2613,14 @@ bb_0:
 
 TEST_CASE_FIXTURE(IrBuilderFixture, "FastCallEffects2")
 {
-    ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
+    ScopedFastFlag luauCodegenFastcall3{FFlag::LuauCodegenFastcall3, true};
 
     IrOp entry = build.block(IrBlockKind::Internal);
 
     build.beginBlock(entry);
-    build.inst(IrCmd::FASTCALL, build.constUint(LBF_MATH_MODF), build.vmReg(1), build.vmReg(2), build.undef(), build.constInt(1), build.constInt(1));
-    build.inst(IrCmd::CHECK_TAG, build.vmReg(1), build.constTag(tnumber), build.vmExit(1));
-    build.inst(IrCmd::CHECK_TAG, build.vmReg(2), build.constTag(tnumber), build.vmExit(1));
+    build.inst(IrCmd::FASTCALL, build.constUint(LBF_MATH_MODF), build.vmReg(1), build.vmReg(2), build.constInt(1));
+    build.inst(IrCmd::CHECK_TAG, build.inst(IrCmd::LOAD_TAG, build.vmReg(1)), build.constTag(tnumber), build.vmExit(1));
+    build.inst(IrCmd::CHECK_TAG, build.inst(IrCmd::LOAD_TAG, build.vmReg(2)), build.constTag(tnumber), build.vmExit(1));
     build.inst(IrCmd::RETURN, build.vmReg(1), build.constInt(2));
 
     updateUseCounts(build.function);
@@ -2629,8 +2630,9 @@ TEST_CASE_FIXTURE(IrBuilderFixture, "FastCallEffects2")
     CHECK("\n" + toString(build.function, IncludeUseInfo::No) == R"(
 bb_0:
 ; in regs: R2
-   FASTCALL 20u, R1, R2, undef, 1i, 1i
-   CHECK_TAG R2, tnumber, exit(1)
+   FASTCALL 20u, R1, R2, 1i
+   %3 = LOAD_TAG R2
+   CHECK_TAG %3, tnumber, exit(1)
    RETURN R1, 2i
 
 )");
@@ -2642,7 +2644,7 @@ TEST_CASE_FIXTURE(IrBuilderFixture, "InferNumberTagFromLimitedContext")
 
     build.beginBlock(entry);
     build.inst(IrCmd::STORE_DOUBLE, build.vmReg(0), build.constDouble(2.0));
-    build.inst(IrCmd::CHECK_TAG, build.vmReg(0), build.constTag(ttable), build.vmExit(1));
+    build.inst(IrCmd::CHECK_TAG, build.inst(IrCmd::LOAD_TAG, build.vmReg(0)), build.constTag(ttable), build.vmExit(1));
     build.inst(IrCmd::STORE_TVALUE, build.vmReg(1), build.inst(IrCmd::LOAD_TVALUE, build.vmReg(0)));
     build.inst(IrCmd::RETURN, build.vmReg(1), build.constInt(1));
 
@ -2677,9 +2679,10 @@ TEST_CASE_FIXTURE(IrBuilderFixture, "DoNotProduceInvalidSplitStore1")
|
||||||
CHECK("\n" + toString(build.function, IncludeUseInfo::No) == R"(
|
CHECK("\n" + toString(build.function, IncludeUseInfo::No) == R"(
|
||||||
bb_0:
|
bb_0:
|
||||||
STORE_INT R0, 1i
|
STORE_INT R0, 1i
|
||||||
CHECK_TAG R0, ttable, exit(1)
|
%1 = LOAD_TAG R0
|
||||||
%2 = LOAD_TVALUE R0
|
CHECK_TAG %1, ttable, exit(1)
|
||||||
STORE_TVALUE R1, %2
|
%3 = LOAD_TVALUE R0
|
||||||
|
STORE_TVALUE R1, %3
|
||||||
RETURN R1, 1i
|
RETURN R1, 1i
|
||||||
|
|
||||||
)");
|
)");
|
||||||
|
@ -2693,7 +2696,7 @@ TEST_CASE_FIXTURE(IrBuilderFixture, "DoNotProduceInvalidSplitStore2")
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.constInt(1));
|
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.constInt(1));
|
||||||
build.inst(IrCmd::CHECK_TAG, build.vmReg(0), build.constTag(tnumber), build.vmExit(1));
|
build.inst(IrCmd::CHECK_TAG, build.inst(IrCmd::LOAD_TAG, build.vmReg(0)), build.constTag(tnumber), build.vmExit(1));
|
||||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(1), build.inst(IrCmd::LOAD_TVALUE, build.vmReg(0)));
|
build.inst(IrCmd::STORE_TVALUE, build.vmReg(1), build.inst(IrCmd::LOAD_TVALUE, build.vmReg(0)));
|
||||||
build.inst(IrCmd::RETURN, build.vmReg(1), build.constInt(1));
|
build.inst(IrCmd::RETURN, build.vmReg(1), build.constInt(1));
|
||||||
|
|
||||||
|
@ -2704,9 +2707,10 @@ TEST_CASE_FIXTURE(IrBuilderFixture, "DoNotProduceInvalidSplitStore2")
|
||||||
CHECK("\n" + toString(build.function, IncludeUseInfo::No) == R"(
|
CHECK("\n" + toString(build.function, IncludeUseInfo::No) == R"(
|
||||||
bb_0:
|
bb_0:
|
||||||
STORE_INT R0, 1i
|
STORE_INT R0, 1i
|
||||||
CHECK_TAG R0, tnumber, exit(1)
|
%1 = LOAD_TAG R0
|
||||||
%2 = LOAD_TVALUE R0
|
CHECK_TAG %1, tnumber, exit(1)
|
||||||
STORE_TVALUE R1, %2
|
%3 = LOAD_TVALUE R0
|
||||||
|
STORE_TVALUE R1, %3
|
||||||
RETURN R1, 1i
|
RETURN R1, 1i
|
||||||
|
|
||||||
)");
|
)");
|
||||||
|
@ -2809,13 +2813,16 @@ bb_1:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "ExplicitUseOfRegisterInVarargSequence")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "ExplicitUseOfRegisterInVarargSequence")
|
||||||
{
|
{
|
||||||
|
ScopedFastFlag luauCodegenInstG{FFlag::LuauCodegenInstG, true};
|
||||||
|
ScopedFastFlag luauCodegenFastcall3{FFlag::LuauCodegenFastcall3, true};
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
IrOp exit = build.block(IrBlockKind::Internal);
|
IrOp exit = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
build.inst(IrCmd::FALLBACK_GETVARARGS, build.constUint(0), build.vmReg(1), build.constInt(-1));
|
build.inst(IrCmd::FALLBACK_GETVARARGS, build.constUint(0), build.vmReg(1), build.constInt(-1));
|
||||||
IrOp results = build.inst(
|
IrOp results = build.inst(IrCmd::INVOKE_FASTCALL, build.constUint(0), build.vmReg(0), build.vmReg(1), build.vmReg(2), build.undef(),
|
||||||
IrCmd::INVOKE_FASTCALL, build.constUint(0), build.vmReg(0), build.vmReg(1), build.vmReg(2), build.constInt(-1), build.constInt(-1));
|
build.constInt(-1), build.constInt(-1));
|
||||||
build.inst(IrCmd::ADJUST_STACK_TO_REG, build.vmReg(0), results);
|
build.inst(IrCmd::ADJUST_STACK_TO_REG, build.vmReg(0), results);
|
||||||
build.inst(IrCmd::JUMP, exit);
|
build.inst(IrCmd::JUMP, exit);
|
||||||
|
|
||||||
|
@ -2830,7 +2837,7 @@ bb_0:
|
||||||
; successors: bb_1
|
; successors: bb_1
|
||||||
; out regs: R0...
|
; out regs: R0...
|
||||||
FALLBACK_GETVARARGS 0u, R1, -1i
|
FALLBACK_GETVARARGS 0u, R1, -1i
|
||||||
%1 = INVOKE_FASTCALL 0u, R0, R1, R2, -1i, -1i
|
%1 = INVOKE_FASTCALL 0u, R0, R1, R2, undef, -1i, -1i
|
||||||
ADJUST_STACK_TO_REG R0, %1
|
ADJUST_STACK_TO_REG R0, %1
|
||||||
JUMP bb_1
|
JUMP bb_1
|
||||||
|
|
||||||
|
@ -3023,8 +3030,6 @@ bb_1:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "ForgprepImplicitUse")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "ForgprepImplicitUse")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
IrOp direct = build.block(IrBlockKind::Internal);
|
IrOp direct = build.block(IrBlockKind::Internal);
|
||||||
IrOp fallback = build.block(IrBlockKind::Internal);
|
IrOp fallback = build.block(IrBlockKind::Internal);
|
||||||
|
@ -3585,8 +3590,6 @@ TEST_SUITE_BEGIN("DeadStoreRemoval");
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "SimpleDoubleStore")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "SimpleDoubleStore")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
@ -3636,8 +3639,6 @@ bb_0:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "UnusedAtReturn")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "UnusedAtReturn")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
@ -3669,8 +3670,6 @@ bb_0:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "UnusedAtReturnPartial")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "UnusedAtReturnPartial")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
@ -3699,8 +3698,6 @@ bb_0:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "HiddenPointerUse1")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "HiddenPointerUse1")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
@ -3729,8 +3726,6 @@ bb_0:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "HiddenPointerUse2")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "HiddenPointerUse2")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
@ -3763,8 +3758,6 @@ bb_0:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "HiddenPointerUse3")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "HiddenPointerUse3")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
@ -3793,8 +3786,6 @@ bb_0:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "HiddenPointerUse4")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "HiddenPointerUse4")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
@ -3827,8 +3818,6 @@ bb_0:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "PartialVsFullStoresWithRecombination")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "PartialVsFullStoresWithRecombination")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
@ -3852,8 +3841,6 @@ bb_0:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "IgnoreFastcallAdjustment")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "IgnoreFastcallAdjustment")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
@ -3880,8 +3867,6 @@ bb_0:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "JumpImplicitLiveOut")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "JumpImplicitLiveOut")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
IrOp next = build.block(IrBlockKind::Internal);
|
IrOp next = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
|
@ -3917,8 +3902,6 @@ bb_1:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "KeepCapturedRegisterStores")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "KeepCapturedRegisterStores")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
@ -3956,7 +3939,6 @@ bb_0:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "StoreCannotBeReplacedWithCheck")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "StoreCannotBeReplacedWithCheck")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
ScopedFastFlag debugLuauAbortingChecks{FFlag::DebugLuauAbortingChecks, true};
|
ScopedFastFlag debugLuauAbortingChecks{FFlag::DebugLuauAbortingChecks, true};
|
||||||
|
|
||||||
IrOp block = build.block(IrBlockKind::Internal);
|
IrOp block = build.block(IrBlockKind::Internal);
|
||||||
|
@ -4025,8 +4007,6 @@ bb_2:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "FullStoreHasToBeObservableFromFallbacks")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "FullStoreHasToBeObservableFromFallbacks")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||||
IrOp last = build.block(IrBlockKind::Internal);
|
IrOp last = build.block(IrBlockKind::Internal);
|
||||||
|
@ -4083,8 +4063,6 @@ bb_2:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "FullStoreHasToBeObservableFromFallbacks2")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "FullStoreHasToBeObservableFromFallbacks2")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||||
IrOp last = build.block(IrBlockKind::Internal);
|
IrOp last = build.block(IrBlockKind::Internal);
|
||||||
|
@ -4139,8 +4117,6 @@ bb_2:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "FullStoreHasToBeObservableFromFallbacks3")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "FullStoreHasToBeObservableFromFallbacks3")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||||
IrOp last = build.block(IrBlockKind::Internal);
|
IrOp last = build.block(IrBlockKind::Internal);
|
||||||
|
@ -4198,8 +4174,6 @@ bb_2:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "SafePartialValueStoresWithPreservedTag")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "SafePartialValueStoresWithPreservedTag")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||||
IrOp last = build.block(IrBlockKind::Internal);
|
IrOp last = build.block(IrBlockKind::Internal);
|
||||||
|
@ -4253,8 +4227,6 @@ bb_2:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "SafePartialValueStoresWithPreservedTag2")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "SafePartialValueStoresWithPreservedTag2")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||||
IrOp last = build.block(IrBlockKind::Internal);
|
IrOp last = build.block(IrBlockKind::Internal);
|
||||||
|
@ -4307,8 +4279,6 @@ bb_2:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "DoNotReturnWithPartialStores")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "DoNotReturnWithPartialStores")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
IrOp success = build.block(IrBlockKind::Internal);
|
IrOp success = build.block(IrBlockKind::Internal);
|
||||||
IrOp fail = build.block(IrBlockKind::Internal);
|
IrOp fail = build.block(IrBlockKind::Internal);
|
||||||
|
@ -4379,8 +4349,6 @@ bb_3:
|
||||||
|
|
||||||
TEST_CASE_FIXTURE(IrBuilderFixture, "PartialOverFullValue")
|
TEST_CASE_FIXTURE(IrBuilderFixture, "PartialOverFullValue")
|
||||||
{
|
{
|
||||||
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
|
|
||||||
|
|
||||||
IrOp entry = build.block(IrBlockKind::Internal);
|
IrOp entry = build.block(IrBlockKind::Internal);
|
||||||
|
|
||||||
build.beginBlock(entry);
|
build.beginBlock(entry);
|
||||||
|
|
|
@@ -15,17 +15,15 @@
 #include <memory>
 #include <string_view>

-LUAU_FASTFLAG(LuauCodegenRemoveDeadStores5)
-LUAU_FASTFLAG(LuauCodegenDirectUserdataFlow)
 LUAU_FASTFLAG(LuauCompileTypeInfo)
-LUAU_FASTFLAG(LuauLoadTypeInfo)
-LUAU_FASTFLAG(LuauCodegenTypeInfo)
 LUAU_FASTFLAG(LuauCompileTempTypeInfo)
 LUAU_FASTFLAG(LuauCodegenAnalyzeHostVectorOps)
 LUAU_FASTFLAG(LuauCompileUserdataInfo)
 LUAU_FASTFLAG(LuauLoadUserdataInfo)
 LUAU_FASTFLAG(LuauCodegenUserdataOps)
 LUAU_FASTFLAG(LuauCodegenUserdataAlloc)
+LUAU_FASTFLAG(LuauCompileFastcall3)
+LUAU_FASTFLAG(LuauCodegenFastcall3)

 static std::string getCodegenAssembly(const char* source, bool includeIrTypes = false, int debugLevel = 1)
 {

@@ -159,8 +157,6 @@ bb_bytecode_1:

 TEST_CASE("VectorComponentRead")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function compsum(a: vector)
 return a.X + a.Y + a.Z

@@ -238,8 +234,6 @@ bb_bytecode_1:

 TEST_CASE("VectorSubMulDiv")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function vec3combo(a: vector, b: vector, c: vector, d: vector)
 return a * b - c / d

@@ -272,8 +266,6 @@ bb_bytecode_1:

 TEST_CASE("VectorSubMulDiv2")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function vec3combo(a: vector)
 local tmp = a * a

@@ -302,8 +294,6 @@ bb_bytecode_1:

 TEST_CASE("VectorMulDivMixed")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function vec3combo(a: vector, b: vector, c: vector, d: vector)
 return a * 2 + b / 4 + 0.5 * c + 40 / d

@@ -344,8 +334,6 @@ bb_bytecode_1:

 TEST_CASE("ExtraMathMemoryOperands")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a: number, b: number, c: number, d: number, e: number)
 return math.floor(a) + math.ceil(b) + math.round(c) + math.sqrt(d) + math.abs(e)

@@ -382,8 +370,6 @@ bb_bytecode_1:

 TEST_CASE("DseInitialStackState")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo()
 while {} do

@@ -422,7 +408,7 @@ bb_5:

 TEST_CASE("DseInitialStackState2")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
+ScopedFastFlag luauCodegenFastcall3{FFlag::LuauCodegenFastcall3, true};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a)

@@ -435,7 +421,7 @@ end
 bb_bytecode_0:
 CHECK_SAFE_ENV exit(1)
 CHECK_TAG R0, tnumber, exit(1)
-FASTCALL 14u, R1, R0, undef, 1i, 2i
+FASTCALL 14u, R1, R0, 2i
 INTERRUPT 5u
 RETURN R0, 1i
 )");

@@ -443,7 +429,7 @@ bb_bytecode_0:

 TEST_CASE("DseInitialStackState3")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
+ScopedFastFlag luauCodegenFastcall3{FFlag::LuauCodegenFastcall3, true};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a)

@@ -456,7 +442,7 @@ end
 bb_bytecode_0:
 CHECK_SAFE_ENV exit(1)
 CHECK_TAG R0, tnumber, exit(1)
-FASTCALL 47u, R1, R0, undef, 1i, 1i
+FASTCALL 47u, R1, R0, 1i
 INTERRUPT 5u
 RETURN R0, 1i
 )");

@@ -464,8 +450,6 @@ bb_bytecode_0:

 TEST_CASE("VectorConstantTag")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function vecrcp(a: vector)
 return vector(1, 2, 3) + a

@@ -491,8 +475,6 @@ bb_bytecode_1:

 TEST_CASE("VectorNamecall")
 {
-ScopedFastFlag luauCodegenDirectUserdataFlow{FFlag::LuauCodegenDirectUserdataFlow, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function abs(a: vector)
 return a:Abs()

@@ -517,8 +499,6 @@ bb_bytecode_1:

 TEST_CASE("VectorRandomProp")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a: vector)
 return a.XX + a.YY + a.ZZ

@@ -559,7 +539,6 @@ bb_6:

 TEST_CASE("VectorCustomAccess")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
 ScopedFastFlag luauCodegenAnalyzeHostVectorOps{FFlag::LuauCodegenAnalyzeHostVectorOps, true};

 CHECK_EQ("\n" + getCodegenAssembly(R"(

@@ -594,8 +573,6 @@ bb_bytecode_1:

 TEST_CASE("VectorCustomNamecall")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-ScopedFastFlag LuauCodegenDirectUserdataFlow{FFlag::LuauCodegenDirectUserdataFlow, true};
 ScopedFastFlag luauCodegenAnalyzeHostVectorOps{FFlag::LuauCodegenAnalyzeHostVectorOps, true};

 CHECK_EQ("\n" + getCodegenAssembly(R"(

@@ -634,8 +611,6 @@ bb_bytecode_1:

 TEST_CASE("VectorCustomAccessChain")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-ScopedFastFlag LuauCodegenDirectUserdataFlow{FFlag::LuauCodegenDirectUserdataFlow, true};
 ScopedFastFlag luauCodegenAnalyzeHostVectorOps{FFlag::LuauCodegenAnalyzeHostVectorOps, true};

 CHECK_EQ("\n" + getCodegenAssembly(R"(

@@ -688,8 +663,6 @@ bb_bytecode_1:

 TEST_CASE("VectorCustomNamecallChain")
 {
-ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
-ScopedFastFlag LuauCodegenDirectUserdataFlow{FFlag::LuauCodegenDirectUserdataFlow, true};
 ScopedFastFlag luauCodegenAnalyzeHostVectorOps{FFlag::LuauCodegenAnalyzeHostVectorOps, true};

 CHECK_EQ("\n" + getCodegenAssembly(R"(

@@ -749,9 +722,7 @@ bb_bytecode_1:

 TEST_CASE("VectorCustomNamecallChain2")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true},
-{FFlag::LuauCodegenAnalyzeHostVectorOps, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCodegenAnalyzeHostVectorOps, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 type Vertex = {n: vector, b: vector}

@@ -827,8 +798,6 @@ bb_6:

 TEST_CASE("UserDataGetIndex")
 {
-ScopedFastFlag luauCodegenDirectUserdataFlow{FFlag::LuauCodegenDirectUserdataFlow, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function getxy(a: Point)
 return a.x + a.y

@@ -859,8 +828,6 @@ bb_4:

 TEST_CASE("UserDataSetIndex")
 {
-ScopedFastFlag luauCodegenDirectUserdataFlow{FFlag::LuauCodegenDirectUserdataFlow, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function setxy(a: Point)
 a.x = 3

@@ -887,8 +854,6 @@ bb_bytecode_1:

 TEST_CASE("UserDataNamecall")
 {
-ScopedFastFlag luauCodegenDirectUserdataFlow{FFlag::LuauCodegenDirectUserdataFlow, true};
-
 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function getxy(a: Point)
 return a:GetX() + a:GetY()

@@ -925,8 +890,7 @@ bb_4:

 TEST_CASE("ExplicitUpvalueAndLocalTypes")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local y: vector = ...

@@ -969,8 +933,7 @@ bb_bytecode_0:

 TEST_CASE("FastcallTypeInferThroughLocal")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileFastcall3, true}, {FFlag::LuauCodegenFastcall3, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function getsum(x, c)

@@ -985,43 +948,40 @@ end
 /* includeIrTypes */ true),
 R"(
 ; function getsum($arg0, $arg1) line 2
-; R2: vector from 0 to 17
+; R2: vector from 0 to 18
 bb_bytecode_0:
-%0 = LOAD_TVALUE R0
-STORE_TVALUE R3, %0
 STORE_DOUBLE R4, 2
 STORE_TAG R4, tnumber
 STORE_DOUBLE R5, 3
 STORE_TAG R5, tnumber
 CHECK_SAFE_ENV exit(4)
-CHECK_TAG R3, tnumber, exit(4)
+CHECK_TAG R0, tnumber, exit(4)
-%13 = LOAD_DOUBLE R3
+%11 = LOAD_DOUBLE R0
-STORE_VECTOR R2, %13, 2, 3
+STORE_VECTOR R2, %11, 2, 3
 STORE_TAG R2, tvector
 JUMP_IF_FALSY R1, bb_bytecode_1, bb_3
 bb_3:
-CHECK_TAG R2, tvector, exit(8)
+CHECK_TAG R2, tvector, exit(9)
-%21 = LOAD_FLOAT R2, 0i
+%19 = LOAD_FLOAT R2, 0i
-%26 = LOAD_FLOAT R2, 4i
+%24 = LOAD_FLOAT R2, 4i
-%35 = ADD_NUM %21, %26
+%33 = ADD_NUM %19, %24
-STORE_DOUBLE R3, %35
+STORE_DOUBLE R3, %33
 STORE_TAG R3, tnumber
-INTERRUPT 13u
+INTERRUPT 14u
 RETURN R3, 1i
 bb_bytecode_1:
-CHECK_TAG R2, tvector, exit(14)
+CHECK_TAG R2, tvector, exit(15)
-%42 = LOAD_FLOAT R2, 8i
+%40 = LOAD_FLOAT R2, 8i
-STORE_DOUBLE R3, %42
+STORE_DOUBLE R3, %40
 STORE_TAG R3, tnumber
-INTERRUPT 16u
+INTERRUPT 17u
 RETURN R3, 1i
 )");
 }

 TEST_CASE("FastcallTypeInferThroughUpvalue")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileFastcall3, true}, {FFlag::LuauCodegenFastcall3, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local v = ...

@@ -1040,48 +1000,45 @@ end
 ; function getsum($arg0, $arg1) line 4
 ; U0: vector
 bb_bytecode_0:
-%0 = LOAD_TVALUE R0
-STORE_TVALUE R3, %0
 STORE_DOUBLE R4, 2
 STORE_TAG R4, tnumber
 STORE_DOUBLE R5, 3
 STORE_TAG R5, tnumber
 CHECK_SAFE_ENV exit(4)
-CHECK_TAG R3, tnumber, exit(4)
+CHECK_TAG R0, tnumber, exit(4)
-%13 = LOAD_DOUBLE R3
+%11 = LOAD_DOUBLE R0
-STORE_VECTOR R2, %13, 2, 3
+STORE_VECTOR R2, %11, 2, 3
 STORE_TAG R2, tvector
 SET_UPVALUE U0, R2, tvector
 JUMP_IF_FALSY R1, bb_bytecode_1, bb_3
 bb_3:
 GET_UPVALUE R4, U0
-CHECK_TAG R4, tvector, exit(10)
+CHECK_TAG R4, tvector, exit(11)
-%23 = LOAD_FLOAT R4, 0i
+%21 = LOAD_FLOAT R4, 0i
-STORE_DOUBLE R3, %23
+STORE_DOUBLE R3, %21
 STORE_TAG R3, tnumber
 GET_UPVALUE R5, U0
-CHECK_TAG R5, tvector, exit(13)
+CHECK_TAG R5, tvector, exit(14)
-%29 = LOAD_FLOAT R5, 4i
+%27 = LOAD_FLOAT R5, 4i
-%38 = ADD_NUM %23, %29
+%36 = ADD_NUM %21, %27
-STORE_DOUBLE R2, %38
+STORE_DOUBLE R2, %36
 STORE_TAG R2, tnumber
-INTERRUPT 16u
+INTERRUPT 17u
 RETURN R2, 1i
 bb_bytecode_1:
 GET_UPVALUE R3, U0
-CHECK_TAG R3, tvector, exit(18)
+CHECK_TAG R3, tvector, exit(19)
-%46 = LOAD_FLOAT R3, 8i
+%44 = LOAD_FLOAT R3, 8i
-STORE_DOUBLE R2, %46
+STORE_DOUBLE R2, %44
 STORE_TAG R2, tnumber
-INTERRUPT 20u
+INTERRUPT 21u
 RETURN R2, 1i
 )");
 }

 TEST_CASE("LoadAndMoveTypePropagation")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function getsum(n)

@@ -1148,8 +1105,7 @@ bb_bytecode_4:

 TEST_CASE("ArgumentTypeRefinement")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileFastcall3, true}, {FFlag::LuauCodegenFastcall3, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function getsum(x, y)

@@ -1164,31 +1120,28 @@ end
 bb_bytecode_0:
 STORE_DOUBLE R3, 1
 STORE_TAG R3, tnumber
-%2 = LOAD_TVALUE R1
-STORE_TVALUE R4, %2
 STORE_DOUBLE R5, 3
 STORE_TAG R5, tnumber
 CHECK_SAFE_ENV exit(4)
-CHECK_TAG R4, tnumber, exit(4)
+CHECK_TAG R1, tnumber, exit(4)
-%14 = LOAD_DOUBLE R4
+%12 = LOAD_DOUBLE R1
-STORE_VECTOR R2, 1, %14, 3
+STORE_VECTOR R2, 1, %12, 3
 STORE_TAG R2, tvector
-%18 = LOAD_TVALUE R2
+%16 = LOAD_TVALUE R2
-STORE_TVALUE R0, %18
+STORE_TVALUE R0, %16
-%22 = LOAD_FLOAT R0, 4i
+%20 = LOAD_FLOAT R0, 4i
-%27 = LOAD_FLOAT R0, 8i
+%25 = LOAD_FLOAT R0, 8i
-%36 = ADD_NUM %22, %27
+%34 = ADD_NUM %20, %25
-STORE_DOUBLE R2, %36
+STORE_DOUBLE R2, %34
 STORE_TAG R2, tnumber
-INTERRUPT 13u
+INTERRUPT 14u
 RETURN R2, 1i
 )");
 }

 TEST_CASE("InlineFunctionType")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function inl(v: vector, s: number)

@@ -1236,8 +1189,7 @@ bb_bytecode_0:

 TEST_CASE("ResolveTablePathTypes")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 type Vertex = {pos: vector, normal: vector}

@@ -1291,8 +1243,7 @@ bb_6:

 TEST_CASE("ResolvableSimpleMath")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}};

 CHECK_EQ("\n" + getCodegenHeader(R"(
 type Vertex = { p: vector, uv: vector, n: vector, t: vector, b: vector, h: number }

@@ -1348,9 +1299,7 @@ end

 TEST_CASE("ResolveVectorNamecalls")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true},
-{FFlag::LuauCodegenAnalyzeHostVectorOps, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCodegenAnalyzeHostVectorOps, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 type Vertex = {pos: vector, normal: vector}

@@ -1414,8 +1363,7 @@ bb_6:

 TEST_CASE("ImmediateTypeAnnotationHelp")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(arr, i)

@@ -1453,8 +1401,8 @@ bb_2:

 TEST_CASE("UnaryTypeResolve")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileFastcall3, true},
+{FFlag::LuauCodegenFastcall3, true}};

 CHECK_EQ("\n" + getCodegenHeader(R"(
 local function foo(a, b: vector, c)

@@ -1467,17 +1415,16 @@ end
 R"(
 ; function foo(a, b, c) line 2
 ; R1: vector [argument 'b']
-; R3: boolean from 0 to 16 [local 'd']
+; R3: boolean from 0 to 17 [local 'd']
-; R4: vector from 1 to 16 [local 'e']
+; R4: vector from 1 to 17 [local 'e']
-; R5: number from 2 to 16 [local 'f']
+; R5: number from 2 to 17 [local 'f']
-; R7: vector from 13 to 15
+; R7: vector from 14 to 16
 )");
 }

 TEST_CASE("ForInManualAnnotation")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 type Vertex = {pos: vector, normal: vector}

@@ -1572,8 +1519,7 @@ bb_12:

 TEST_CASE("ForInAutoAnnotationIpairs")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}};

 CHECK_EQ("\n" + getCodegenHeader(R"(
 type Vertex = {pos: vector, normal: vector}

@@ -1600,8 +1546,7 @@ end

 TEST_CASE("ForInAutoAnnotationPairs")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}};

 CHECK_EQ("\n" + getCodegenHeader(R"(
 type Vertex = {pos: vector, normal: vector}

@@ -1628,8 +1573,7 @@ end

 TEST_CASE("ForInAutoAnnotationGeneric")
 {
-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}};

 CHECK_EQ("\n" + getCodegenHeader(R"(
 type Vertex = {pos: vector, normal: vector}

@@ -1661,8 +1605,7 @@ TEST_CASE("CustomUserdataTypesTemp")
 if (!Luau::CodeGen::isSupported())
 return;

-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, false},
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, false},
 {FFlag::LuauLoadUserdataInfo, true}};

 CHECK_EQ("\n" + getCodegenHeader(R"(

@@ -1683,8 +1626,7 @@ TEST_CASE("CustomUserdataTypes")
 if (!Luau::CodeGen::isSupported())
 return;

-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
 {FFlag::LuauLoadUserdataInfo, true}};

 CHECK_EQ("\n" + getCodegenHeader(R"(

@@ -1705,9 +1647,8 @@ TEST_CASE("CustomUserdataPropertyAccess")
 if (!Luau::CodeGen::isSupported())
 return;

-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
-{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true}, {FFlag::LuauCodegenUserdataOps, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
+{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(v: vec2)

@@ -1742,9 +1683,8 @@ TEST_CASE("CustomUserdataPropertyAccess2")
 if (!Luau::CodeGen::isSupported())
 return;

-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
-{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true}, {FFlag::LuauCodegenUserdataOps, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
+{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a: mat3)

@@ -1781,10 +1721,8 @@ TEST_CASE("CustomUserdataNamecall1")
 if (!Luau::CodeGen::isSupported())
 return;

-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
-{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true}, {FFlag::LuauCodegenAnalyzeHostVectorOps, true},
-{FFlag::LuauCodegenUserdataOps, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
+{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenAnalyzeHostVectorOps, true}, {FFlag::LuauCodegenUserdataOps, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a: vec2, b: vec2)

@@ -1830,10 +1768,9 @@ TEST_CASE("CustomUserdataNamecall2")
 if (!Luau::CodeGen::isSupported())
 return;

-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
-{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true}, {FFlag::LuauCodegenAnalyzeHostVectorOps, true},
-{FFlag::LuauCodegenUserdataOps, true}, {FFlag::LuauCodegenUserdataAlloc, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
+{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenAnalyzeHostVectorOps, true}, {FFlag::LuauCodegenUserdataOps, true},
+{FFlag::LuauCodegenUserdataAlloc, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a: vec2, b: vec2)

@@ -1882,9 +1819,8 @@ TEST_CASE("CustomUserdataMetamethodDirectFlow")
 if (!Luau::CodeGen::isSupported())
 return;

-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
-{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true}, {FFlag::LuauCodegenUserdataOps, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
+{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a: mat3, b: mat3)

@@ -1916,9 +1852,8 @@ TEST_CASE("CustomUserdataMetamethodDirectFlow2")
 if (!Luau::CodeGen::isSupported())
 return;

-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
-{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true}, {FFlag::LuauCodegenUserdataOps, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
+{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a: mat3)

@@ -1948,9 +1883,8 @@ TEST_CASE("CustomUserdataMetamethodDirectFlow3")
 if (!Luau::CodeGen::isSupported())
 return;

-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
-{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true}, {FFlag::LuauCodegenUserdataOps, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
+{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a: sequence)

@@ -1980,10 +1914,8 @@ TEST_CASE("CustomUserdataMetamethod")
 if (!Luau::CodeGen::isSupported())
 return;

-ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
-{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
-{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true}, {FFlag::LuauCodegenUserdataOps, true},
-{FFlag::LuauCodegenUserdataAlloc, true}};
+ScopedFastFlag sffs[]{{FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCompileUserdataInfo, true},
+{FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}, {FFlag::LuauCodegenUserdataAlloc, true}};

 CHECK_EQ("\n" + getCodegenAssembly(R"(
 local function foo(a: vec2, b: vec2, c: vec2)

@@ -8,6 +8,9 @@
 #include "doctest.h"

 LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution);
+LUAU_FASTFLAG(LuauAttributeSyntax);
+LUAU_FASTFLAG(LuauNativeAttribute);
+LUAU_FASTFLAG(LintRedundantNativeAttribute);

 using namespace Luau;

@@ -1955,4 +1958,32 @@ local _ = a <= (b == 0)
 CHECK_EQ(result.warnings[4].text, "X <= Y <= Z is equivalent to (X <= Y) <= Z; did you mean X <= Y and Y <= Z?");
 }

+TEST_CASE_FIXTURE(Fixture, "RedundantNativeAttribute")
+{
+ScopedFastFlag sff[] = {{FFlag::LuauAttributeSyntax, true}, {FFlag::LuauNativeAttribute, true}, {FFlag::LintRedundantNativeAttribute, true}};
+
+LintResult result = lint(R"(
+--!native
+
+@native
+local function f(a)
+@native
+local function g(b)
+return (a + b)
+end
+return g
+end
+
+f(3)(4)
+)");
+
+REQUIRE(2 == result.warnings.size());
+
+CHECK_EQ(result.warnings[0].text, "native attribute on a function is redundant in a native module; consider removing it");
+CHECK_EQ(result.warnings[0].location, Location(Position(3, 0), Position(3, 7)));
+
+CHECK_EQ(result.warnings[1].text, "native attribute on a function is redundant in a native module; consider removing it");
+CHECK_EQ(result.warnings[1].location, Location(Position(5, 4), Position(5, 11)));
+}
+
 TEST_SUITE_END();

@@ -2298,10 +2298,14 @@ end
     if (FFlag::DebugLuauDeferredConstraintResolution)
     {
         LUAU_REQUIRE_ERROR_COUNT(4, result);
-        CHECK(toString(result.errors[0]) == "Operator '-' could not be applied to operands of types unknown and number; there is no corresponding overload for __sub");
-        CHECK(toString(result.errors[1]) == "Operator '-' could not be applied to operands of types unknown and number; there is no corresponding overload for __sub");
-        CHECK(toString(result.errors[2]) == "Operator '-' could not be applied to operands of types unknown and number; there is no corresponding overload for __sub");
-        CHECK(toString(result.errors[3]) == "Operator '-' could not be applied to operands of types unknown and number; there is no corresponding overload for __sub");
+        CHECK(toString(result.errors[0]) ==
+            "Operator '-' could not be applied to operands of types unknown and number; there is no corresponding overload for __sub");
+        CHECK(toString(result.errors[1]) ==
+            "Operator '-' could not be applied to operands of types unknown and number; there is no corresponding overload for __sub");
+        CHECK(toString(result.errors[2]) ==
+            "Operator '-' could not be applied to operands of types unknown and number; there is no corresponding overload for __sub");
+        CHECK(toString(result.errors[3]) ==
+            "Operator '-' could not be applied to operands of types unknown and number; there is no corresponding overload for __sub");
     }
     else
     {
@@ -2719,7 +2723,6 @@ end
 _ = _,{}
 
-
 )");
 
 }
 
@@ -1230,4 +1230,45 @@ TEST_CASE_FIXTURE(Fixture, "table_containing_non_final_type_is_erroneously_cache
     CHECK(n1 == n2);
 }
 
+// This is doable with the new solver, but there are some problems we have to work out first.
+// CLI-111113
+TEST_CASE_FIXTURE(Fixture, "we_cannot_infer_functions_that_return_inconsistently")
+{
+    CheckResult result = check(R"(
+        function find_first<T>(tbl: {T}, el)
+            for i, e in tbl do
+                if e == el then
+                    return i
+                end
+            end
+            return nil
+        end
+    )");
+
+#if 0
+    // This #if block describes what should happen.
+    LUAU_CHECK_NO_ERRORS(result);
+
+    // The second argument has type unknown because the == operator does not
+    // constrain the type of el.
+    CHECK("<T>({T}, unknown) -> number?" == toString(requireType("find_first")));
+#else
+    // This is what actually happens right now.
+
+    if (FFlag::DebugLuauDeferredConstraintResolution)
+    {
+        LUAU_CHECK_ERROR_COUNT(2, result);
+
+        // The second argument should be unknown. CLI-111111
+        CHECK("<T>({T}, 'b) -> number" == toString(requireType("find_first")));
+    }
+    else
+    {
+        LUAU_CHECK_ERROR_COUNT(1, result);
+
+        CHECK("<T, b>({T}, b) -> number" == toString(requireType("find_first")));
+    }
+#endif
+}
+
 TEST_SUITE_END();
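A possible workaround for the inference gap the commented-out block above describes is to annotate the parameter and return type explicitly, so the solver does not have to reconcile the inconsistent returns on its own. This is a sketch, not something the diff adds, and assumes ordinary Luau generic-function and optional-type syntax:

    -- explicit annotations: el matches the element type, result is number or nil
    local function find_first<T>(tbl: {T}, el: T): number?
        for i, e in tbl do
            if e == el then
                return i
            end
        end
        return nil
    end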
@@ -21,12 +21,30 @@ LUAU_FASTFLAG(LuauInstantiateInSubtyping);
 LUAU_FASTFLAG(LuauAlwaysCommitInferencesOfFunctionCalls);
 LUAU_FASTFLAG(LuauFixIndexerSubtypingOrdering);
 LUAU_FASTFLAG(DebugLuauSharedSelf);
-LUAU_FASTFLAG(LuauMetatableInstantiationCloneCheck);
 
 LUAU_DYNAMIC_FASTFLAG(LuauImproveNonFunctionCallError)
 
 TEST_SUITE_BEGIN("TableTests");
 
+TEST_CASE_FIXTURE(BuiltinsFixture, "generalization_shouldnt_seal_table_in_len_family_fn")
+{
+    if (!FFlag::DebugLuauDeferredConstraintResolution)
+        return;
+    CheckResult result = check(R"(
+        local t = {}
+        for i = #t, 2, -1 do
+            t[i] = t[i + 1]
+        end
+    )");
+
+    LUAU_REQUIRE_NO_ERRORS(result);
+    const TableType* tType = get<TableType>(requireType("t"));
+    REQUIRE(tType != nullptr);
+    REQUIRE(tType->indexer);
+    CHECK_EQ(tType->indexer->indexType, builtinTypes->numberType);
+    CHECK_EQ(follow(tType->indexer->indexResultType), builtinTypes->unknownType);
+}
+
 TEST_CASE_FIXTURE(BuiltinsFixture, "LUAU_ASSERT_arg_exprs_doesnt_trigger_assert")
 {
     CheckResult result = check(R"(
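For intuition about the new TableTests case above, here is a small Luau sketch of the pattern it protects; it is not taken from the diff and assumes the new solver's usual handling of unsealed table literals: computing the length of a table should not freeze its shape, because later writes may still need to introduce an indexer.

    local t = {}         -- unsealed empty table literal
    local n = #t         -- taking the length must not seal `t` ...
    t[n + 1] = t[n]      -- ... or this write could not add a number indexer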
@@ -4150,9 +4168,7 @@ TEST_CASE_FIXTURE(Fixture, "write_annotations_are_unsupported_even_with_the_new_
 
 TEST_CASE_FIXTURE(Fixture, "read_and_write_only_table_properties_are_unsupported")
 {
-    ScopedFastFlag sff[] = {
-        {FFlag::DebugLuauDeferredConstraintResolution, false}
-    };
+    ScopedFastFlag sff[] = {{FFlag::DebugLuauDeferredConstraintResolution, false}};
 
     CheckResult result = check(R"(
 type W = {read x: number}
@@ -4176,9 +4192,7 @@ TEST_CASE_FIXTURE(Fixture, "read_and_write_only_table_properties_are_unsupported
 
 TEST_CASE_FIXTURE(Fixture, "read_ond_write_only_indexers_are_unsupported")
 {
-    ScopedFastFlag sff[] = {
-        {FFlag::DebugLuauDeferredConstraintResolution, false}
-    };
+    ScopedFastFlag sff[] = {{FFlag::DebugLuauDeferredConstraintResolution, false}};
 
     CheckResult result = check(R"(
 type T = {read [string]: number}
@@ -4198,9 +4212,7 @@ TEST_CASE_FIXTURE(Fixture, "table_writes_introduce_write_properties")
     if (!FFlag::DebugLuauDeferredConstraintResolution)
         return;
 
-    ScopedFastFlag sff[] = {
-        {FFlag::DebugLuauDeferredConstraintResolution, true}
-    };
+    ScopedFastFlag sff[] = {{FFlag::DebugLuauDeferredConstraintResolution, true}};
 
     CheckResult result = check(R"(
 function oc(player, speaker)
@@ -4354,8 +4366,6 @@ TEST_CASE_FIXTURE(Fixture, "mymovie_read_write_tables_bug_2")
 
 TEST_CASE_FIXTURE(BuiltinsFixture, "instantiated_metatable_frozen_table_clone_mutation")
 {
-    ScopedFastFlag luauMetatableInstantiationCloneCheck{FFlag::LuauMetatableInstantiationCloneCheck, true};
-
     fileResolver.source["game/worker"] = R"(
 type WorkerImpl<T..., R...> = {
     destroy: (self: Worker<T..., R...>) -> boolean,
@@ -4533,8 +4543,24 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "table_literal_inference_assert")
         end;
     }
 )");
+}
+
+TEST_CASE_FIXTURE(BuiltinsFixture, "metatable_table_assertion_crash")
+{
+    CheckResult result = check(R"(
+        local NexusInstance = {}
+        function NexusInstance:__InitMetaMethods(): ()
+            local Metatable = {}
+            local OriginalIndexTable = getmetatable(self).__index
+            setmetatable(self, Metatable)
+
+            Metatable.__newindex = function(_, Index: string, Value: any): ()
+                --Return if the new and old values are the same.
+                if self[Index] == Value then
+                end
+            end
+        end
+    )");
 }
 
 TEST_SUITE_END();
@@ -1536,7 +1536,7 @@ TEST_CASE_FIXTURE(Fixture, "typeof_cannot_refine_builtin_alias")
 
     freeze(arena);
 
-    (void) check(R"(
+    (void)check(R"(
 function foo(x)
     if typeof(x) == 'GlobalTable' then
     end
@@ -72,6 +72,7 @@ for _, b in pairs(c) do
 assert(bit32.bxor(b, b) == 0)
 assert(bit32.bxor(b, 0) == b)
 assert(bit32.bxor(b, b, b) == b)
+assert(bit32.bxor(b, b, b, b) == 0)
 assert(bit32.bnot(b) ~= b)
 assert(bit32.bnot(bit32.bnot(b)) == b)
 assert(bit32.bnot(b) == 2^32 - 1 - b)
@@ -268,10 +268,33 @@ assert(math.min(1) == 1)
 assert(math.min(1, 2) == 1)
 assert(math.min(1, 2, -1) == -1)
 assert(math.min(1, -1, 2) == -1)
+assert(math.min(1, -1, 2, -2) == -2)
 assert(math.max(1) == 1)
 assert(math.max(1, 2) == 2)
 assert(math.max(1, 2, -1) == 2)
 assert(math.max(1, -1, 2) == 2)
+assert(math.max(1, -1, 2, -2) == 2)
+
+local ma, mb, mc, md
+
+assert(pcall(function()
+  ma = 1
+  mb = -1
+  mc = 2
+  md = -2
+end) == true)
+
+-- min/max without constant-folding
+assert(math.min(ma) == 1)
+assert(math.min(ma, mc) == 1)
+assert(math.min(ma, mc, mb) == -1)
+assert(math.min(ma, mb, mc) == -1)
+assert(math.min(ma, mb, mc, md) == -2)
+assert(math.max(ma) == 1)
+assert(math.max(ma, mc) == 2)
+assert(math.max(ma, mc, mb) == 2)
+assert(math.max(ma, mb, mc) == 2)
+assert(math.max(ma, mb, mc, md) == 2)
 
 -- noise
 assert(math.noise(0.5) == 0)