Sync to upstream/release/624 (#1245)

# What's changed?

* Optimize `table.maxn`; this function is now 5-14x faster (see the usage sketch after this list)
* Reserve Luau stack space for error messages
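A small usage sketch (not part of the diff) of the call this speeds up; `table.maxn` returns the largest positive numeric key of a table, and the new implementation scans the table's array and hash parts directly instead of iterating with `lua_next`:

```lua
-- table.maxn returns the largest positive numeric key stored in the table.
local t = {1, 2, 3}
t[100] = "sparse tail" -- keys far beyond the array part are still considered
t.name = "ignored"     -- non-numeric keys do not affect the result

print(table.maxn(t)) --> 100
```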

## New Solver

* Globals can be type-stated, but only if they are already in scope
* Fix a stack overflow that could occur when normalizing certain kinds
of recursive unions of intersections (of unions of intersections...)
* Fix an assertion failure that would trigger when the `__iter` metamethod has a bad signature (see the sketch after this list)
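A hedged Luau sketch of the `__iter` case above; the exact reproduction is not part of this changelog, but any `__iter` metamethod that does not return a proper iterator function counts as having a bad signature:

```lua
-- __iter should return an iterator function (plus optional state and initial
-- control value); returning a non-function gives it a bad signature.
local badIterable = setmetatable({}, {
    __iter = function(self)
        return 5 -- not callable
    end,
})

-- The new solver previously hit an assertion failure while checking this
-- loop; it now handles the malformed signature without asserting.
for k, v in badIterable do
    print(k, v)
end
```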

## Native Codegen

* Type propagation and temporary register type hints
* Direct vector property access is now only performed for field names of the right length (see the sketch after this list)
* BytecodeAnalysis will only predict that the single-character vector component fields are numbers
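A hedged Luau sketch of the vector-field behavior, assuming a host-exposed `vector` type (the `vectorType`/`vectorCtor` compile options used elsewhere in this change); only the one-character component names `x`/`y`/`z` (case-insensitive) should take the direct-access fast path:

```lua
-- Real vector components: native codegen may load these directly as numbers.
local function length2(v: vector)
    return v.x * v.x + v.y * v.y + v.z * v.z
end

-- A longer, hypothetical field name must not be treated as a component and
-- has to go through the generic GETTABLEKS path instead.
local function notAComponent(v: vector)
    return (v :: any).xyz
end
```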

---

## Internal Contributors

Co-authored-by: Alexander McCord <amccord@roblox.com>
Co-authored-by: Andy Friesen <afriesen@roblox.com>
Co-authored-by: Aviral Goel <agoel@roblox.com>
Co-authored-by: Vyacheslav Egorov <vegorov@roblox.com>
Committed by Andy Friesen on 2024-05-03 13:17:51 -07:00 (via GitHub)
Parent commit: 7edd58afed
Commit: 8a64cb8b73
30 changed files with 1656 additions and 115 deletions


@@ -395,6 +395,7 @@ public:
TypeId negate(TypeId there);
void subtractPrimitive(NormalizedType& here, TypeId ty);
void subtractSingleton(NormalizedType& here, TypeId ty);
NormalizationResult intersectNormalWithNegationTy(TypeId toNegate, NormalizedType& intersect, bool useDeprecated = false);

// ------- Normalizing intersections
TypeId intersectionOfTops(TypeId here, TypeId there);


@@ -1900,10 +1900,7 @@ Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprGlobal* globa
return Inference{*ty, refinementArena.proposition(key, builtinTypes->truthyType)};
}
else
-{
-reportError(global->location, UnknownSymbol{global->name.value, UnknownSymbol::Binding});
return Inference{builtinTypes->errorRecoveryType()};
-}
}

Inference ConstraintGenerator::checkIndexName(const ScopePtr& scope, const RefinementKey* key, AstExpr* indexee, const std::string& index, Location indexLocation)

@@ -2453,7 +2450,12 @@ ConstraintGenerator::LValueBounds ConstraintGenerator::checkLValue(const ScopePt
{
std::optional<TypeId> annotatedTy = scope->lookup(Symbol{global->name});
if (annotatedTy)
-return {annotatedTy, arena->addType(BlockedType{})};
{
DefId def = dfg->getDef(global);
TypeId assignedTy = arena->addType(BlockedType{});
rootScope->lvalueTypes[def] = assignedTy;
return {annotatedTy, assignedTy};
}
else
return {annotatedTy, std::nullopt};
}


@@ -1619,9 +1619,7 @@ std::pair<bool, std::optional<TypeId>> ConstraintSolver::tryDispatchSetIndexer(
{
if (tt->indexer)
{
-if (isBlocked(tt->indexer->indexType))
-return {block(tt->indexer->indexType, constraint), std::nullopt};
-else if (isBlocked(tt->indexer->indexResultType))
if (isBlocked(tt->indexer->indexResultType))
return {block(tt->indexer->indexResultType, constraint), std::nullopt};

unify(constraint, indexType, tt->indexer->indexType);

@@ -2014,10 +2012,14 @@ bool ConstraintSolver::tryDispatchIterableTable(TypeId iteratorTy, const Iterabl
if (std::optional<TypeId> instantiatedNextFn = instantiate(builtinTypes, arena, NotNull{&limits}, constraint->scope, nextFn))
{
const FunctionType* nextFn = get<FunctionType>(*instantiatedNextFn);
-LUAU_ASSERT(nextFn);
-const TypePackId nextRetPack = nextFn->retTypes;
-pushConstraint(constraint->scope, constraint->location, UnpackConstraint{c.variables, nextRetPack, /* resultIsLValue=*/true});

// If nextFn is nullptr, then the iterator function has an improper signature.
if (nextFn)
{
const TypePackId nextRetPack = nextFn->retTypes;
pushConstraint(constraint->scope, constraint->location, UnpackConstraint{c.variables, nextRetPack, /* resultIsLValue=*/true});
}

return true;
}
else


@@ -20,6 +20,7 @@ LUAU_FASTFLAGVARIABLE(LuauNormalizeAwayUninhabitableTables, false)
LUAU_FASTFLAGVARIABLE(LuauFixNormalizeCaching, false);
LUAU_FASTFLAGVARIABLE(LuauNormalizeNotUnknownIntersection, false);
LUAU_FASTFLAGVARIABLE(LuauFixCyclicUnionsOfIntersections, false);
LUAU_FASTFLAGVARIABLE(LuauFixReduceStackPressure, false);

// This could theoretically be 2000 on amd64, but x86 requires this.
LUAU_FASTINTVARIABLE(LuauNormalizeIterationLimit, 1200);

@@ -36,6 +37,11 @@ static bool fixCyclicUnionsOfIntersections()
return FFlag::LuauFixCyclicUnionsOfIntersections || FFlag::DebugLuauDeferredConstraintResolution;
}

static bool fixReduceStackPressure()
{
return FFlag::LuauFixReduceStackPressure || FFlag::DebugLuauDeferredConstraintResolution;
}

namespace Luau
{

@@ -45,6 +51,14 @@ static bool normalizeAwayUninhabitableTables()
return FFlag::LuauNormalizeAwayUninhabitableTables || FFlag::DebugLuauDeferredConstraintResolution;
}

static bool shouldEarlyExit(NormalizationResult res)
{
// if res is hit limits, return control flow
if (res == NormalizationResult::HitLimits || res == NormalizationResult::False)
return true;
return false;
}

TypeIds::TypeIds(std::initializer_list<TypeId> tys)
{
for (TypeId ty : tys)

@@ -1729,6 +1743,27 @@ bool Normalizer::withinResourceLimits()
return true;
}

NormalizationResult Normalizer::intersectNormalWithNegationTy(TypeId toNegate, NormalizedType& intersect, bool useDeprecated)
{
std::optional<NormalizedType> negated;
if (useDeprecated)
{
const NormalizedType* normal = DEPRECATED_normalize(toNegate);
negated = negateNormal(*normal);
}
else
{
std::shared_ptr<const NormalizedType> normal = normalize(toNegate);
negated = negateNormal(*normal);
}
if (!negated)
return NormalizationResult::False;
intersectNormals(intersect, *negated);
return NormalizationResult::True;
}

// See above for an explaination of `ignoreSmallerTyvars`.
NormalizationResult Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, Set<TypeId>& seenSetTypes, int ignoreSmallerTyvars)
{

@@ -2541,8 +2576,8 @@ std::optional<TypeId> Normalizer::intersectionOfTables(TypeId here, TypeId there
state = tttv->state;

TypeLevel level = max(httv->level, tttv->level);
-TableType result{state, level};
std::unique_ptr<TableType> result = nullptr;

bool hereSubThere = true;
bool thereSubHere = true;

@@ -2563,8 +2598,18 @@ std::optional<TypeId> Normalizer::intersectionOfTables(TypeId here, TypeId there
if (tprop.readTy.has_value())
{
// if the intersection of the read types of a property is uninhabited, the whole table is `never`.
-if (normalizeAwayUninhabitableTables() && NormalizationResult::False == isIntersectionInhabited(*hprop.readTy, *tprop.readTy))
-return {builtinTypes->neverType};
if (fixReduceStackPressure())
{
if (normalizeAwayUninhabitableTables() &&
NormalizationResult::True != isIntersectionInhabited(*hprop.readTy, *tprop.readTy))
return {builtinTypes->neverType};
}
else
{
if (normalizeAwayUninhabitableTables() &&
NormalizationResult::False == isIntersectionInhabited(*hprop.readTy, *tprop.readTy))
return {builtinTypes->neverType};
}

TypeId ty = simplifyIntersection(builtinTypes, NotNull{arena}, *hprop.readTy, *tprop.readTy).result;
prop.readTy = ty;

@@ -2614,14 +2659,21 @@ std::optional<TypeId> Normalizer::intersectionOfTables(TypeId here, TypeId there
// TODO: string indexers

if (prop.readTy || prop.writeTy)
-result.props[name] = prop;
{
if (!result.get())
result = std::make_unique<TableType>(TableType{state, level});
result->props[name] = prop;
}
}

for (const auto& [name, tprop] : tttv->props)
{
if (httv->props.count(name) == 0)
{
-result.props[name] = tprop;
if (!result.get())
result = std::make_unique<TableType>(TableType{state, level});
result->props[name] = tprop;
hereSubThere = false;
}
}

@@ -2631,18 +2683,24 @@ std::optional<TypeId> Normalizer::intersectionOfTables(TypeId here, TypeId there
// TODO: What should intersection of indexes be?
TypeId index = unionType(httv->indexer->indexType, tttv->indexer->indexType);
TypeId indexResult = intersectionType(httv->indexer->indexResultType, tttv->indexer->indexResultType);
-result.indexer = {index, indexResult};
if (!result.get())
result = std::make_unique<TableType>(TableType{state, level});
result->indexer = {index, indexResult};
hereSubThere &= (httv->indexer->indexType == index) && (httv->indexer->indexResultType == indexResult);
thereSubHere &= (tttv->indexer->indexType == index) && (tttv->indexer->indexResultType == indexResult);
}
else if (httv->indexer)
{
-result.indexer = httv->indexer;
if (!result.get())
result = std::make_unique<TableType>(TableType{state, level});
result->indexer = httv->indexer;
thereSubHere = false;
}
else if (tttv->indexer)
{
-result.indexer = tttv->indexer;
if (!result.get())
result = std::make_unique<TableType>(TableType{state, level});
result->indexer = tttv->indexer;
hereSubThere = false;
}

@@ -2652,7 +2710,12 @@ std::optional<TypeId> Normalizer::intersectionOfTables(TypeId here, TypeId there
else if (thereSubHere)
table = ttable;
else
-table = arena->addType(std::move(result));
{
if (result.get())
table = arena->addType(std::move(*result));
else
table = arena->addType(TableType{state, level});
}

if (tmtable && hmtable)
{

@@ -3150,19 +3213,15 @@ NormalizationResult Normalizer::intersectNormalWithTy(NormalizedType& here, Type
{
if (fixNormalizeCaching())
{
-std::shared_ptr<const NormalizedType> normal = normalize(t);
-std::optional<NormalizedType> negated = negateNormal(*normal);
-if (!negated)
-return NormalizationResult::False;
-intersectNormals(here, *negated);
NormalizationResult res = intersectNormalWithNegationTy(t, here);
if (shouldEarlyExit(res))
return res;
}
else
{
-const NormalizedType* normal = DEPRECATED_normalize(t);
-std::optional<NormalizedType> negated = negateNormal(*normal);
-if (!negated)
-return NormalizationResult::False;
-intersectNormals(here, *negated);
NormalizationResult res = intersectNormalWithNegationTy(t, here, /* useDeprecated */ true);
if (shouldEarlyExit(res))
return res;
}
}
else if (const UnionType* itv = get<UnionType>(t))

@@ -3171,11 +3230,9 @@ NormalizationResult Normalizer::intersectNormalWithTy(NormalizedType& here, Type
{
for (TypeId part : itv->options)
{
-std::shared_ptr<const NormalizedType> normalPart = normalize(part);
-std::optional<NormalizedType> negated = negateNormal(*normalPart);
-if (!negated)
-return NormalizationResult::False;
-intersectNormals(here, *negated);
NormalizationResult res = intersectNormalWithNegationTy(part, here);
if (shouldEarlyExit(res))
return res;
}
}
else

@@ -3184,22 +3241,18 @@ NormalizationResult Normalizer::intersectNormalWithTy(NormalizedType& here, Type
{
for (TypeId part : itv->options)
{
-std::shared_ptr<const NormalizedType> normalPart = normalize(part);
-std::optional<NormalizedType> negated = negateNormal(*normalPart);
-if (!negated)
-return NormalizationResult::False;
-intersectNormals(here, *negated);
NormalizationResult res = intersectNormalWithNegationTy(part, here);
if (shouldEarlyExit(res))
return res;
}
}
else
{
for (TypeId part : itv->options)
{
-const NormalizedType* normalPart = DEPRECATED_normalize(part);
-std::optional<NormalizedType> negated = negateNormal(*normalPart);
-if (!negated)
-return NormalizationResult::False;
-intersectNormals(here, *negated);
NormalizationResult res = intersectNormalWithNegationTy(part, here, /* useDeprecated */ true);
if (shouldEarlyExit(res))
return res;
}
}
}


@@ -1280,7 +1280,9 @@ struct TypeChecker2
void visit(AstExprGlobal* expr)
{
-// TODO!
NotNull<Scope> scope = stack.back();
if (!scope->lookup(expr->name))
reportError(UnknownSymbol{expr->name.value, UnknownSymbol::Binding}, expr->location);
}

void visit(AstExprVarargs* expr)


@@ -317,6 +317,7 @@ static bool compileFile(const char* name, CompileFormat format, Luau::CodeGen::A
{
options.includeAssembly = format != CompileFormat::CodegenIr;
options.includeIr = format != CompileFormat::CodegenAsm;
options.includeIrTypes = format != CompileFormat::CodegenAsm;
options.includeOutlinedCode = format == CompileFormat::CodegenVerbose;
}


@@ -229,6 +229,7 @@ if(LUAU_BUILD_TESTS)
target_link_libraries(Luau.UnitTest PRIVATE Luau.Analysis Luau.Compiler Luau.CodeGen)

target_compile_options(Luau.Conformance PRIVATE ${LUAU_OPTIONS})
target_compile_definitions(Luau.Conformance PRIVATE DOCTEST_CONFIG_DOUBLE_STRINGIFY)
target_include_directories(Luau.Conformance PRIVATE extern)
target_link_libraries(Luau.Conformance PRIVATE Luau.Analysis Luau.Compiler Luau.CodeGen Luau.VM)
if(CMAKE_SYSTEM_NAME MATCHES "Android|iOS")


@@ -40,8 +40,12 @@ enum class CodeGenCompilationResult
CodeGenAssemblerFinalizationFailure = 7, // Failure during assembler finalization
CodeGenLoweringFailure = 8, // Lowering failed
AllocationFailed = 9, // Native codegen failed due to an allocation error

Count = 10,
};

std::string toString(const CodeGenCompilationResult& result);

struct ProtoCompilationFailure
{
CodeGenCompilationResult result = CodeGenCompilationResult::Success;


@@ -6,6 +6,9 @@
#include "Luau/IrUtils.h"

#include "lobject.h"
#include "lstate.h"

#include <algorithm>

@@ -13,6 +16,7 @@ LUAU_FASTFLAG(LuauCodegenDirectUserdataFlow)
LUAU_FASTFLAG(LuauLoadTypeInfo) // Because new VM typeinfo load changes the format used by Codegen, same flag is used
LUAU_FASTFLAGVARIABLE(LuauCodegenTypeInfo, false) // New analysis is flagged separately
LUAU_FASTFLAG(LuauTypeInfoLookupImprovement)
LUAU_FASTFLAGVARIABLE(LuauCodegenVectorMispredictFix, false)

namespace Luau
{

@@ -771,10 +775,30 @@ void analyzeBytecodeTypes(IrFunction& function)
regTags[ra] = LBC_TYPE_ANY;

-// Assuming that vector component is being indexed
-// TODO: check what key is used
-if (bcType.a == LBC_TYPE_VECTOR)
-regTags[ra] = LBC_TYPE_NUMBER;
if (FFlag::LuauCodegenVectorMispredictFix)
{
if (bcType.a == LBC_TYPE_VECTOR)
{
TString* str = gco2ts(function.proto->k[kc].value.gc);
const char* field = getstr(str);
if (str->len == 1)
{
// Same handling as LOP_GETTABLEKS block in lvmexecute.cpp - case-insensitive comparison with "X" / "Y" / "Z"
char ch = field[0] | ' ';
if (ch == 'x' || ch == 'y' || ch == 'z')
regTags[ra] = LBC_TYPE_NUMBER;
}
}
}
else
{
// Assuming that vector component is being indexed
// TODO: check what key is used
if (bcType.a == LBC_TYPE_VECTOR)
regTags[ra] = LBC_TYPE_NUMBER;
}

bcType.result = regTags[ra];
break;


@@ -65,6 +65,38 @@ namespace Luau
namespace CodeGen
{
std::string toString(const CodeGenCompilationResult& result)
{
switch (result)
{
case CodeGenCompilationResult::Success:
return "Success";
case CodeGenCompilationResult::NothingToCompile:
return "NothingToCompile";
case CodeGenCompilationResult::NotNativeModule:
return "NotNativeModule";
case CodeGenCompilationResult::CodeGenNotInitialized:
return "CodeGenNotInitialized";
case CodeGenCompilationResult::CodeGenOverflowInstructionLimit:
return "CodeGenOverflowInstructionLimit";
case CodeGenCompilationResult::CodeGenOverflowBlockLimit:
return "CodeGenOverflowBlockLimit";
case CodeGenCompilationResult::CodeGenOverflowBlockInstructionLimit:
return "CodeGenOverflowBlockInstructionLimit";
case CodeGenCompilationResult::CodeGenAssemblerFinalizationFailure:
return "CodeGenAssemblerFinalizationFailure";
case CodeGenCompilationResult::CodeGenLoweringFailure:
return "CodeGenLoweringFailure";
case CodeGenCompilationResult::AllocationFailed:
return "AllocationFailed";
case CodeGenCompilationResult::Count:
return "Count";
}
CODEGEN_ASSERT(false);
return "";
}

static const Instruction kCodeEntryInsn = LOP_NATIVECALL;

void* gPerfLogContext = nullptr;


@@ -13,12 +13,55 @@
#include "lapi.h"

LUAU_FASTFLAG(LuauCodegenTypeInfo)
LUAU_FASTFLAGVARIABLE(LuauCodegenIrTypeNames, false)

namespace Luau
{
namespace CodeGen
{
static const LocVar* tryFindLocal(const Proto* proto, int reg, int pcpos)
{
CODEGEN_ASSERT(FFlag::LuauCodegenIrTypeNames);
for (int i = 0; i < proto->sizelocvars; i++)
{
const LocVar& local = proto->locvars[i];
if (reg == local.reg && pcpos >= local.startpc && pcpos < local.endpc)
return &local;
}
return nullptr;
}
const char* tryFindLocalName(const Proto* proto, int reg, int pcpos)
{
CODEGEN_ASSERT(FFlag::LuauCodegenIrTypeNames);
const LocVar* var = tryFindLocal(proto, reg, pcpos);
if (var && var->varname)
return getstr(var->varname);
return nullptr;
}
const char* tryFindUpvalueName(const Proto* proto, int upval)
{
CODEGEN_ASSERT(FFlag::LuauCodegenIrTypeNames);
if (proto->upvalues)
{
CODEGEN_ASSERT(upval < proto->sizeupvalues);
if (proto->upvalues[upval])
return getstr(proto->upvalues[upval]);
}
return nullptr;
}

template<typename AssemblyBuilder>
static void logFunctionHeader(AssemblyBuilder& build, Proto* proto)
{

@@ -29,12 +72,22 @@ static void logFunctionHeader(AssemblyBuilder& build, Proto* proto)
for (int i = 0; i < proto->numparams; i++)
{
-LocVar* var = proto->locvars ? &proto->locvars[proto->sizelocvars - proto->numparams + i] : nullptr;
-if (var && var->varname)
-build.logAppend("%s%s", i == 0 ? "" : ", ", getstr(var->varname));
-else
-build.logAppend("%s$arg%d", i == 0 ? "" : ", ", i);
if (FFlag::LuauCodegenIrTypeNames)
{
if (const char* name = tryFindLocalName(proto, i, 0))
build.logAppend("%s%s", i == 0 ? "" : ", ", name);
else
build.logAppend("%s$arg%d", i == 0 ? "" : ", ", i);
}
else
{
LocVar* var = proto->locvars ? &proto->locvars[proto->sizelocvars - proto->numparams + i] : nullptr;

if (var && var->varname)
build.logAppend("%s%s", i == 0 ? "" : ", ", getstr(var->varname));
else
build.logAppend("%s$arg%d", i == 0 ? "" : ", ", i);
}
}

if (proto->numparams != 0 && proto->is_vararg)

@@ -59,21 +112,58 @@ static void logFunctionTypes(AssemblyBuilder& build, const IrFunction& function)
{
uint8_t ty = typeInfo.argumentTypes[i];

-if (ty != LBC_TYPE_ANY)
-build.logAppend("; R%d: %s [argument]\n", int(i), getBytecodeTypeName(ty));
if (FFlag::LuauCodegenIrTypeNames)
{
if (ty != LBC_TYPE_ANY)
{
if (const char* name = tryFindLocalName(function.proto, int(i), 0))
build.logAppend("; R%d: %s [argument '%s']\n", int(i), getBytecodeTypeName(ty), name);
else
build.logAppend("; R%d: %s [argument]\n", int(i), getBytecodeTypeName(ty));
}
}
else
{
if (ty != LBC_TYPE_ANY)
build.logAppend("; R%d: %s [argument]\n", int(i), getBytecodeTypeName(ty));
}
}

for (size_t i = 0; i < typeInfo.upvalueTypes.size(); i++)
{
uint8_t ty = typeInfo.upvalueTypes[i];

-if (ty != LBC_TYPE_ANY)
-build.logAppend("; U%d: %s\n", int(i), getBytecodeTypeName(ty));
if (FFlag::LuauCodegenIrTypeNames)
{
if (ty != LBC_TYPE_ANY)
{
if (const char* name = tryFindUpvalueName(function.proto, int(i)))
build.logAppend("; U%d: %s ['%s']\n", int(i), getBytecodeTypeName(ty), name);
else
build.logAppend("; U%d: %s\n", int(i), getBytecodeTypeName(ty));
}
}
else
{
if (ty != LBC_TYPE_ANY)
build.logAppend("; U%d: %s\n", int(i), getBytecodeTypeName(ty));
}
}

for (const BytecodeRegTypeInfo& el : typeInfo.regTypes)
{
-build.logAppend("; R%d: %s from %d to %d\n", el.reg, getBytecodeTypeName(el.type), el.startpc, el.endpc);
if (FFlag::LuauCodegenIrTypeNames)
{
// Using last active position as the PC because 'startpc' for type info is before local is initialized
if (const char* name = tryFindLocalName(function.proto, el.reg, el.endpc - 1))
build.logAppend("; R%d: %s from %d to %d [local '%s']\n", el.reg, getBytecodeTypeName(el.type), el.startpc, el.endpc, name);
else
build.logAppend("; R%d: %s from %d to %d\n", el.reg, getBytecodeTypeName(el.type), el.startpc, el.endpc);
}
else
{
build.logAppend("; R%d: %s from %d to %d\n", el.reg, getBytecodeTypeName(el.type), el.startpc, el.endpc);
}
}
}


@@ -13,6 +13,7 @@
#include "ltm.h"

LUAU_FASTFLAGVARIABLE(LuauCodegenDirectUserdataFlow, false)
LUAU_FASTFLAGVARIABLE(LuauCodegenFixVectorFields, false)

namespace Luau
{

@@ -1197,19 +1198,19 @@ void translateInstGetTableKS(IrBuilder& build, const Instruction* pc, int pcpos)
TString* str = gco2ts(build.function.proto->k[aux].value.gc);
const char* field = getstr(str);

-if (*field == 'X' || *field == 'x')
if ((!FFlag::LuauCodegenFixVectorFields || str->len == 1) && (*field == 'X' || *field == 'x'))
{
IrOp value = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(rb), build.constInt(0));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), value);
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
}
-else if (*field == 'Y' || *field == 'y')
else if ((!FFlag::LuauCodegenFixVectorFields || str->len == 1) && (*field == 'Y' || *field == 'y'))
{
IrOp value = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(rb), build.constInt(4));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), value);
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
}
-else if (*field == 'Z' || *field == 'z')
else if ((!FFlag::LuauCodegenFixVectorFields || str->len == 1) && (*field == 'Z' || *field == 'z'))
{
IrOp value = build.inst(IrCmd::LOAD_FLOAT, build.vmReg(rb), build.constInt(8));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), value);


@@ -454,7 +454,7 @@ BuiltinInfo getBuiltinInfo(int bfid)
case LBF_BUFFER_WRITEF32:
case LBF_BUFFER_WRITEF64:
return {3, 0, BuiltinInfo::Flag_NoneSafe};
-};
}

LUAU_UNREACHABLE();
}


@@ -29,6 +29,7 @@ LUAU_FASTINTVARIABLE(LuauCompileInlineDepth, 5)
LUAU_FASTFLAGVARIABLE(LuauCompileRepeatUntilSkippedLocals, false)
LUAU_FASTFLAG(LuauCompileTypeInfo)
LUAU_FASTFLAGVARIABLE(LuauTypeInfoLookupImprovement, false)
LUAU_FASTFLAGVARIABLE(LuauCompileTempTypeInfo, false)

namespace Luau
{

@@ -108,6 +109,8 @@ struct Compiler
, builtins(nullptr)
, functionTypes(nullptr)
, localTypes(nullptr)
, exprTypes(nullptr)
, builtinTypes(options.vectorType)
{
// preallocate some buffers that are very likely to grow anyway; this works around std::vector's inefficient growth policy for small arrays
localStack.reserve(16);

@@ -916,6 +919,9 @@ struct Compiler
bytecode.emitABC(LOP_NAMECALL, regs, selfreg, uint8_t(BytecodeBuilder::getStringHash(iname)));
bytecode.emitAux(cid);

if (FFlag::LuauCompileTempTypeInfo)
hintTemporaryExprRegType(fi->expr, selfreg, LBC_TYPE_TABLE, /* instLength */ 2);
}
else if (bfid >= 0)
{

@@ -1570,6 +1576,9 @@ struct Compiler
uint8_t rl = compileExprAuto(expr->left, rs);

bytecode.emitABC(getBinaryOpArith(expr->op, /* k= */ true), target, rl, uint8_t(rc));

if (FFlag::LuauCompileTempTypeInfo)
hintTemporaryExprRegType(expr->left, rl, LBC_TYPE_NUMBER, /* instLength */ 1);
}
else
{

@@ -1583,6 +1592,9 @@ struct Compiler
LuauOpcode op = (expr->op == AstExprBinary::Sub) ? LOP_SUBRK : LOP_DIVRK;

bytecode.emitABC(op, target, uint8_t(lc), uint8_t(rr));

if (FFlag::LuauCompileTempTypeInfo)
hintTemporaryExprRegType(expr->right, rr, LBC_TYPE_NUMBER, /* instLength */ 1);
return;
}
}

@@ -1591,6 +1603,12 @@ struct Compiler
uint8_t rr = compileExprAuto(expr->right, rs);

bytecode.emitABC(getBinaryOpArith(expr->op), target, rl, rr);

if (FFlag::LuauCompileTempTypeInfo)
{
hintTemporaryExprRegType(expr->left, rl, LBC_TYPE_NUMBER, /* instLength */ 1);
hintTemporaryExprRegType(expr->right, rr, LBC_TYPE_NUMBER, /* instLength */ 1);
}
}
}
break;

@@ -2030,6 +2048,9 @@ struct Compiler
bytecode.emitABC(LOP_GETTABLEKS, target, reg, uint8_t(BytecodeBuilder::getStringHash(iname)));
bytecode.emitAux(cid);

if (FFlag::LuauCompileTempTypeInfo)
hintTemporaryExprRegType(expr->expr, reg, LBC_TYPE_TABLE, /* instLength */ 2);
}

void compileExprIndexExpr(AstExprIndexExpr* expr, uint8_t target)

@@ -3410,6 +3431,14 @@ struct Compiler
uint8_t rr = compileExprAuto(stat->value, rs);

bytecode.emitABC(getBinaryOpArith(stat->op), target, target, rr);

if (FFlag::LuauCompileTempTypeInfo)
{
if (var.kind != LValue::Kind_Local)
hintTemporaryRegType(stat->var, target, LBC_TYPE_NUMBER, /* instLength */ 1);

hintTemporaryExprRegType(stat->value, rr, LBC_TYPE_NUMBER, /* instLength */ 1);
}
}
}
break;

@@ -3794,6 +3823,27 @@ struct Compiler
return !node->is<AstStatBlock>() && !node->is<AstStatTypeAlias>();
}
void hintTemporaryRegType(AstExpr* expr, int reg, LuauBytecodeType expectedType, int instLength)
{
LUAU_ASSERT(FFlag::LuauCompileTempTypeInfo);
// If we know the type of a temporary and it's not the type that would be expected by codegen, provide a hint
if (LuauBytecodeType* ty = exprTypes.find(expr))
{
if (*ty != expectedType)
bytecode.pushLocalTypeInfo(*ty, reg, bytecode.getDebugPC() - instLength, bytecode.getDebugPC());
}
}
void hintTemporaryExprRegType(AstExpr* expr, int reg, LuauBytecodeType expectedType, int instLength)
{
LUAU_ASSERT(FFlag::LuauCompileTempTypeInfo);
// If we allocated a temporary register for the operation argument, try hinting its type
if (!getExprLocal(expr))
hintTemporaryRegType(expr, reg, expectedType, instLength);
}

struct FenvVisitor : AstVisitor
{
bool& getfenvUsed;

@@ -4046,6 +4096,9 @@ struct Compiler
DenseHashMap<AstExprCall*, int> builtins;
DenseHashMap<AstExprFunction*, std::string> functionTypes;
DenseHashMap<AstLocal*, LuauBytecodeType> localTypes;
DenseHashMap<AstExpr*, LuauBytecodeType> exprTypes;

BuiltinTypes builtinTypes;

const DenseHashMap<AstExprCall*, int>* builtinsFold = nullptr;
bool builtinsFoldMathK = false;

@@ -4141,12 +4194,14 @@ void compileOrThrow(BytecodeBuilder& bytecode, const ParseResult& parseResult, c
if (FFlag::LuauCompileTypeInfo)
{
if (options.typeInfoLevel >= 1)
-buildTypeMap(compiler.functionTypes, compiler.localTypes, root, options.vectorType);
buildTypeMap(compiler.functionTypes, compiler.localTypes, compiler.exprTypes, root, options.vectorType, compiler.builtinTypes,
compiler.builtins, compiler.globals);
}
else
{
if (functionVisitor.hasTypes)
-buildTypeMap(compiler.functionTypes, compiler.localTypes, root, options.vectorType);
buildTypeMap(compiler.functionTypes, compiler.localTypes, compiler.exprTypes, root, options.vectorType, compiler.builtinTypes,
compiler.builtins, compiler.globals);
}

for (AstExprFunction* expr : functions)


@@ -4,6 +4,7 @@
#include "Luau/BytecodeBuilder.h"

LUAU_FASTFLAG(LuauCompileTypeInfo)
LUAU_FASTFLAG(LuauCompileTempTypeInfo)

namespace Luau
{

@@ -37,10 +38,10 @@ static LuauBytecodeType getPrimitiveType(AstName name)
return LBC_TYPE_INVALID;
}

-static LuauBytecodeType getType(AstType* ty, const AstArray<AstGenericType>& generics, const DenseHashMap<AstName, AstStatTypeAlias*>& typeAliases,
-bool resolveAliases, const char* vectorType)
static LuauBytecodeType getType(const AstType* ty, const AstArray<AstGenericType>& generics,
const DenseHashMap<AstName, AstStatTypeAlias*>& typeAliases, bool resolveAliases, const char* vectorType)
{
-if (AstTypeReference* ref = ty->as<AstTypeReference>())
if (const AstTypeReference* ref = ty->as<AstTypeReference>())
{
if (ref->prefix)
return LBC_TYPE_ANY;

@@ -66,15 +67,15 @@ static LuauBytecodeType getType(AstType* ty, const AstArray<AstGenericType>& gen
// not primitive or alias or generic => host-provided, we assume userdata for now
return LBC_TYPE_USERDATA;
}
-else if (AstTypeTable* table = ty->as<AstTypeTable>())
else if (const AstTypeTable* table = ty->as<AstTypeTable>())
{
return LBC_TYPE_TABLE;
}
-else if (AstTypeFunction* func = ty->as<AstTypeFunction>())
else if (const AstTypeFunction* func = ty->as<AstTypeFunction>())
{
return LBC_TYPE_FUNCTION;
}
-else if (AstTypeUnion* un = ty->as<AstTypeUnion>())
else if (const AstTypeUnion* un = ty->as<AstTypeUnion>())
{
bool optional = false;
LuauBytecodeType type = LBC_TYPE_INVALID;

@@ -104,7 +105,7 @@ static LuauBytecodeType getType(AstType* ty, const AstArray<AstGenericType>& gen
return LuauBytecodeType(type | (optional && (type != LBC_TYPE_ANY) ? LBC_TYPE_OPTIONAL_BIT : 0));
}
-else if (AstTypeIntersection* inter = ty->as<AstTypeIntersection>())
else if (const AstTypeIntersection* inter = ty->as<AstTypeIntersection>())
{
return LBC_TYPE_ANY;
}

@@ -144,21 +145,44 @@ static std::string getFunctionType(const AstExprFunction* func, const DenseHashM
return typeInfo;
}
static bool isMatchingGlobal(const DenseHashMap<AstName, Compile::Global>& globals, AstExpr* node, const char* name)
{
LUAU_ASSERT(FFlag::LuauCompileTempTypeInfo);
if (AstExprGlobal* expr = node->as<AstExprGlobal>())
return Compile::getGlobalState(globals, expr->name) == Compile::Global::Default && expr->name == name;
return false;
}

struct TypeMapVisitor : AstVisitor
{
DenseHashMap<AstExprFunction*, std::string>& functionTypes;
DenseHashMap<AstLocal*, LuauBytecodeType>& localTypes;
DenseHashMap<AstExpr*, LuauBytecodeType>& exprTypes;
const char* vectorType;
const BuiltinTypes& builtinTypes;
const DenseHashMap<AstExprCall*, int>& builtinCalls;
const DenseHashMap<AstName, Compile::Global>& globals;

DenseHashMap<AstName, AstStatTypeAlias*> typeAliases;
std::vector<std::pair<AstName, AstStatTypeAlias*>> typeAliasStack;
DenseHashMap<AstLocal*, const AstType*> resolvedLocals;
DenseHashMap<AstExpr*, const AstType*> resolvedExprs;

-TypeMapVisitor(
-DenseHashMap<AstExprFunction*, std::string>& functionTypes, DenseHashMap<AstLocal*, LuauBytecodeType>& localTypes, const char* vectorType)
TypeMapVisitor(DenseHashMap<AstExprFunction*, std::string>& functionTypes, DenseHashMap<AstLocal*, LuauBytecodeType>& localTypes,
DenseHashMap<AstExpr*, LuauBytecodeType>& exprTypes, const char* vectorType, const BuiltinTypes& builtinTypes,
const DenseHashMap<AstExprCall*, int>& builtinCalls, const DenseHashMap<AstName, Compile::Global>& globals)
: functionTypes(functionTypes)
, localTypes(localTypes)
, exprTypes(exprTypes)
, vectorType(vectorType)
, builtinTypes(builtinTypes)
, builtinCalls(builtinCalls)
, globals(globals)
, typeAliases(AstName())
, resolvedLocals(nullptr)
, resolvedExprs(nullptr)
{
}

@@ -189,6 +213,64 @@ struct TypeMapVisitor : AstVisitor
}
}
const AstType* resolveAliases(const AstType* ty)
{
LUAU_ASSERT(FFlag::LuauCompileTempTypeInfo);
if (const AstTypeReference* ref = ty->as<AstTypeReference>())
{
if (ref->prefix)
return ty;
if (AstStatTypeAlias* const* alias = typeAliases.find(ref->name); alias && *alias)
return (*alias)->type;
}
return ty;
}
const AstTableIndexer* tryGetTableIndexer(AstExpr* expr)
{
LUAU_ASSERT(FFlag::LuauCompileTempTypeInfo);
if (const AstType** typePtr = resolvedExprs.find(expr))
{
if (const AstTypeTable* tableTy = (*typePtr)->as<AstTypeTable>())
return tableTy->indexer;
}
return nullptr;
}
LuauBytecodeType recordResolvedType(AstExpr* expr, const AstType* ty)
{
LUAU_ASSERT(FFlag::LuauCompileTempTypeInfo);
ty = resolveAliases(ty);
resolvedExprs[expr] = ty;
LuauBytecodeType bty = getType(ty, {}, typeAliases, /* resolveAliases= */ true, vectorType);
exprTypes[expr] = bty;
return bty;
}
LuauBytecodeType recordResolvedType(AstLocal* local, const AstType* ty)
{
LUAU_ASSERT(FFlag::LuauCompileTempTypeInfo);
ty = resolveAliases(ty);
resolvedLocals[local] = ty;
LuauBytecodeType bty = getType(ty, {}, typeAliases, /* resolveAliases= */ true, vectorType);
if (bty != LBC_TYPE_ANY)
localTypes[local] = bty;
return bty;
}

bool visit(AstStatBlock* node) override
{
size_t aliasStackTop = pushTypeAliases(node);

@@ -216,6 +298,60 @@ struct TypeMapVisitor : AstVisitor
return false;
}
// for...in statement can contain type annotations on locals (we might even infer some for ipairs/pairs/generalized iteration)
bool visit(AstStatForIn* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
for (AstExpr* expr : node->values)
expr->visit(this);
// This is similar to how Compiler matches builtin iteration, but we also handle generalized iteration case
if (node->vars.size == 2 && node->values.size == 1)
{
if (AstExprCall* call = node->values.data[0]->as<AstExprCall>(); call && call->args.size == 1)
{
AstExpr* func = call->func;
AstExpr* arg = call->args.data[0];
if (isMatchingGlobal(globals, func, "ipairs"))
{
if (const AstTableIndexer* indexer = tryGetTableIndexer(arg))
{
recordResolvedType(node->vars.data[0], &builtinTypes.numberType);
recordResolvedType(node->vars.data[1], indexer->resultType);
}
}
else if (isMatchingGlobal(globals, func, "pairs"))
{
if (const AstTableIndexer* indexer = tryGetTableIndexer(arg))
{
recordResolvedType(node->vars.data[0], indexer->indexType);
recordResolvedType(node->vars.data[1], indexer->resultType);
}
}
}
else if (const AstTableIndexer* indexer = tryGetTableIndexer(node->values.data[0]))
{
recordResolvedType(node->vars.data[0], indexer->indexType);
recordResolvedType(node->vars.data[1], indexer->resultType);
}
}
for (size_t i = 0; i < node->vars.size; i++)
{
AstLocal* var = node->vars.data[i];
if (AstType* annotation = var->annotation)
recordResolvedType(var, annotation);
}
node->body->visit(this);
return false;
}

bool visit(AstExprFunction* node) override
{
std::string type = getFunctionType(node, typeAliases, vectorType);

@@ -223,32 +359,405 @@ struct TypeMapVisitor : AstVisitor
if (!type.empty())
functionTypes[node] = std::move(type);

-return true;
return true; // Let generic visitor step into all expressions
}

bool visit(AstExprLocal* node) override
{
-if (FFlag::LuauCompileTypeInfo)
-{
-AstLocal* local = node->local;
-
-if (AstType* annotation = local->annotation)
-{
-LuauBytecodeType ty = getType(annotation, {}, typeAliases, /* resolveAliases= */ true, vectorType);
-
-if (ty != LBC_TYPE_ANY)
-localTypes[local] = ty;
-}
-}
-
-return true;
if (FFlag::LuauCompileTempTypeInfo)
{
if (FFlag::LuauCompileTypeInfo)
{
AstLocal* local = node->local;

if (AstType* annotation = local->annotation)
{
LuauBytecodeType ty = recordResolvedType(node, annotation);

if (ty != LBC_TYPE_ANY)
localTypes[local] = ty;
}
else if (const AstType** typePtr = resolvedLocals.find(local))
{
localTypes[local] = recordResolvedType(node, *typePtr);
}
}

return false;
}
else
{
if (FFlag::LuauCompileTypeInfo)
{
AstLocal* local = node->local;

if (AstType* annotation = local->annotation)
{
LuauBytecodeType ty = getType(annotation, {}, typeAliases, /* resolveAliases= */ true, vectorType);

if (ty != LBC_TYPE_ANY)
localTypes[local] = ty;
}
}

return true;
}
}

bool visit(AstStatLocal* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;

for (AstExpr* expr : node->values)
expr->visit(this);

for (size_t i = 0; i < node->vars.size; i++)
{
AstLocal* var = node->vars.data[i];

// Propagate from the value that's being assigned
// This simple propagation doesn't handle type packs in tail position
if (var->annotation == nullptr)
{
if (i < node->values.size)
{
if (const AstType** typePtr = resolvedExprs.find(node->values.data[i]))
resolvedLocals[var] = *typePtr;
}
}
}

return false;
}
bool visit(AstExprIndexExpr* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
node->expr->visit(this);
node->index->visit(this);
if (const AstTableIndexer* indexer = tryGetTableIndexer(node->expr))
recordResolvedType(node, indexer->resultType);
return false;
}
bool visit(AstExprIndexName* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
node->expr->visit(this);
if (const AstType** typePtr = resolvedExprs.find(node->expr))
{
if (const AstTypeTable* tableTy = (*typePtr)->as<AstTypeTable>())
{
for (const AstTableProp& prop : tableTy->props)
{
if (prop.name == node->index)
{
recordResolvedType(node, prop.type);
return false;
}
}
}
}
if (LuauBytecodeType* typeBcPtr = exprTypes.find(node->expr))
{
if (*typeBcPtr == LBC_TYPE_VECTOR)
{
if (node->index == "X" || node->index == "Y" || node->index == "Z")
recordResolvedType(node, &builtinTypes.numberType);
}
}
return false;
}
bool visit(AstExprUnary* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
node->expr->visit(this);
switch (node->op)
{
case AstExprUnary::Not:
recordResolvedType(node, &builtinTypes.booleanType);
break;
case AstExprUnary::Minus:
{
const AstType** typePtr = resolvedExprs.find(node->expr);
LuauBytecodeType* bcTypePtr = exprTypes.find(node->expr);
if (!typePtr || !bcTypePtr)
return false;
if (*bcTypePtr == LBC_TYPE_VECTOR)
recordResolvedType(node, *typePtr);
else if (*bcTypePtr == LBC_TYPE_NUMBER)
recordResolvedType(node, *typePtr);
break;
}
case AstExprUnary::Len:
recordResolvedType(node, &builtinTypes.numberType);
break;
}
return false;
}
bool visit(AstExprBinary* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
node->left->visit(this);
node->right->visit(this);
// Comparisons result in a boolean
if (node->op == AstExprBinary::CompareNe || node->op == AstExprBinary::CompareEq || node->op == AstExprBinary::CompareLt ||
node->op == AstExprBinary::CompareLe || node->op == AstExprBinary::CompareGt || node->op == AstExprBinary::CompareGe)
{
recordResolvedType(node, &builtinTypes.booleanType);
return false;
}
if (node->op == AstExprBinary::Concat || node->op == AstExprBinary::And || node->op == AstExprBinary::Or)
return false;
const AstType** leftTypePtr = resolvedExprs.find(node->left);
LuauBytecodeType* leftBcTypePtr = exprTypes.find(node->left);
if (!leftTypePtr || !leftBcTypePtr)
return false;
const AstType** rightTypePtr = resolvedExprs.find(node->right);
LuauBytecodeType* rightBcTypePtr = exprTypes.find(node->right);
if (!rightTypePtr || !rightBcTypePtr)
return false;
if (*leftBcTypePtr == LBC_TYPE_VECTOR)
recordResolvedType(node, *leftTypePtr);
else if (*rightBcTypePtr == LBC_TYPE_VECTOR)
recordResolvedType(node, *rightTypePtr);
else if (*leftBcTypePtr == LBC_TYPE_NUMBER && *rightBcTypePtr == LBC_TYPE_NUMBER)
recordResolvedType(node, *leftTypePtr);
return false;
}
bool visit(AstExprGroup* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
node->expr->visit(this);
if (const AstType** typePtr = resolvedExprs.find(node->expr))
recordResolvedType(node, *typePtr);
return false;
}
bool visit(AstExprTypeAssertion* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
node->expr->visit(this);
recordResolvedType(node, node->annotation);
return false;
}
bool visit(AstExprConstantBool* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
recordResolvedType(node, &builtinTypes.booleanType);
return false;
}
bool visit(AstExprConstantNumber* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
recordResolvedType(node, &builtinTypes.numberType);
return false;
}
bool visit(AstExprConstantString* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
recordResolvedType(node, &builtinTypes.stringType);
return false;
}
bool visit(AstExprInterpString* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
recordResolvedType(node, &builtinTypes.stringType);
return false;
}
bool visit(AstExprIfElse* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
node->condition->visit(this);
node->trueExpr->visit(this);
node->falseExpr->visit(this);
const AstType** trueTypePtr = resolvedExprs.find(node->trueExpr);
LuauBytecodeType* trueBcTypePtr = exprTypes.find(node->trueExpr);
LuauBytecodeType* falseBcTypePtr = exprTypes.find(node->falseExpr);
// Optimistic check that both expressions are of the same kind, as AstType* cannot be compared
if (trueTypePtr && trueBcTypePtr && falseBcTypePtr && *trueBcTypePtr == *falseBcTypePtr)
recordResolvedType(node, *trueTypePtr);
return false;
}
bool visit(AstExprCall* node) override
{
if (!FFlag::LuauCompileTempTypeInfo)
return true;
if (const int* bfid = builtinCalls.find(node))
{
switch (LuauBuiltinFunction(*bfid))
{
case LBF_NONE:
case LBF_ASSERT:
case LBF_RAWSET:
case LBF_RAWGET:
case LBF_TABLE_INSERT:
case LBF_TABLE_UNPACK:
case LBF_SELECT_VARARG:
case LBF_GETMETATABLE:
case LBF_SETMETATABLE:
case LBF_BUFFER_WRITEU8:
case LBF_BUFFER_WRITEU16:
case LBF_BUFFER_WRITEU32:
case LBF_BUFFER_WRITEF32:
case LBF_BUFFER_WRITEF64:
break;
case LBF_MATH_ABS:
case LBF_MATH_ACOS:
case LBF_MATH_ASIN:
case LBF_MATH_ATAN2:
case LBF_MATH_ATAN:
case LBF_MATH_CEIL:
case LBF_MATH_COSH:
case LBF_MATH_COS:
case LBF_MATH_DEG:
case LBF_MATH_EXP:
case LBF_MATH_FLOOR:
case LBF_MATH_FMOD:
case LBF_MATH_FREXP:
case LBF_MATH_LDEXP:
case LBF_MATH_LOG10:
case LBF_MATH_LOG:
case LBF_MATH_MAX:
case LBF_MATH_MIN:
case LBF_MATH_MODF:
case LBF_MATH_POW:
case LBF_MATH_RAD:
case LBF_MATH_SINH:
case LBF_MATH_SIN:
case LBF_MATH_SQRT:
case LBF_MATH_TANH:
case LBF_MATH_TAN:
case LBF_BIT32_ARSHIFT:
case LBF_BIT32_BAND:
case LBF_BIT32_BNOT:
case LBF_BIT32_BOR:
case LBF_BIT32_BXOR:
case LBF_BIT32_BTEST:
case LBF_BIT32_EXTRACT:
case LBF_BIT32_LROTATE:
case LBF_BIT32_LSHIFT:
case LBF_BIT32_REPLACE:
case LBF_BIT32_RROTATE:
case LBF_BIT32_RSHIFT:
case LBF_STRING_BYTE:
case LBF_STRING_LEN:
case LBF_MATH_CLAMP:
case LBF_MATH_SIGN:
case LBF_MATH_ROUND:
case LBF_BIT32_COUNTLZ:
case LBF_BIT32_COUNTRZ:
case LBF_RAWLEN:
case LBF_BIT32_EXTRACTK:
case LBF_TONUMBER:
case LBF_BIT32_BYTESWAP:
case LBF_BUFFER_READI8:
case LBF_BUFFER_READU8:
case LBF_BUFFER_READI16:
case LBF_BUFFER_READU16:
case LBF_BUFFER_READI32:
case LBF_BUFFER_READU32:
case LBF_BUFFER_READF32:
case LBF_BUFFER_READF64:
recordResolvedType(node, &builtinTypes.numberType);
break;
case LBF_TYPE:
case LBF_STRING_CHAR:
case LBF_TYPEOF:
case LBF_STRING_SUB:
case LBF_TOSTRING:
recordResolvedType(node, &builtinTypes.stringType);
break;
case LBF_RAWEQUAL:
recordResolvedType(node, &builtinTypes.booleanType);
break;
case LBF_VECTOR:
recordResolvedType(node, &builtinTypes.vectorType);
break;
}
}
return true; // Let generic visitor step into all expressions
}
// AstExpr classes that are not covered:
// * AstExprConstantNil is not resolved to 'nil' because that doesn't help codegen operations and often used as an initializer before real value
// * AstExprGlobal is not supported as we don't have info on globals
// * AstExprVarargs cannot be resolved to a testable type
// * AstExprTable cannot be reconstructed into a specific AstTypeTable and table annotations don't really help codegen
// * AstExprCall is very complex (especially if builtins and registered globals are included), will be extended in the future
};

-void buildTypeMap(DenseHashMap<AstExprFunction*, std::string>& functionTypes, DenseHashMap<AstLocal*, LuauBytecodeType>& localTypes, AstNode* root,
-const char* vectorType);
void buildTypeMap(DenseHashMap<AstExprFunction*, std::string>& functionTypes, DenseHashMap<AstLocal*, LuauBytecodeType>& localTypes,
DenseHashMap<AstExpr*, LuauBytecodeType>& exprTypes, AstNode* root, const char* vectorType, const BuiltinTypes& builtinTypes,
const DenseHashMap<AstExprCall*, int>& builtinCalls, const DenseHashMap<AstName, Compile::Global>& globals)
{
-TypeMapVisitor visitor(functionTypes, localTypes, vectorType);
TypeMapVisitor visitor(functionTypes, localTypes, exprTypes, vectorType, builtinTypes, builtinCalls, globals);
root->visit(&visitor);
}


@@ -4,13 +4,29 @@
#include "Luau/Ast.h"
#include "Luau/Bytecode.h"
#include "Luau/DenseHash.h"
#include "ValueTracking.h"

#include <string>

namespace Luau
{

-void buildTypeMap(DenseHashMap<AstExprFunction*, std::string>& functionTypes, DenseHashMap<AstLocal*, LuauBytecodeType>& localTypes, AstNode* root,
-const char* vectorType);
struct BuiltinTypes
{
BuiltinTypes(const char* vectorType)
: vectorType{{}, std::nullopt, AstName{vectorType}, std::nullopt, {}}
{
}

// AstName use here will not match the AstNameTable, but the was we use them here always force a full string compare
AstTypeReference booleanType{{}, std::nullopt, AstName{"boolean"}, std::nullopt, {}};
AstTypeReference numberType{{}, std::nullopt, AstName{"number"}, std::nullopt, {}};
AstTypeReference stringType{{}, std::nullopt, AstName{"string"}, std::nullopt, {}};
AstTypeReference vectorType;
};

void buildTypeMap(DenseHashMap<AstExprFunction*, std::string>& functionTypes, DenseHashMap<AstLocal*, LuauBytecodeType>& localTypes,
DenseHashMap<AstExpr*, LuauBytecodeType>& exprTypes, AstNode* root, const char* vectorType, const BuiltinTypes& builtinTypes,
const DenseHashMap<AstExprCall*, int>& builtinCalls, const DenseHashMap<AstName, Compile::Global>& globals);

} // namespace Luau


@@ -12,6 +12,8 @@
#include <string.h>
#include <stdio.h>

LUAU_FASTFLAGVARIABLE(LuauPushErrorStackCheck, false)

static const char* getfuncname(Closure* f);

static int currentpc(lua_State* L, CallInfo* ci)

@@ -330,12 +332,18 @@ l_noret luaG_runerrorL(lua_State* L, const char* fmt, ...)
vsnprintf(result, sizeof(result), fmt, argp);
va_end(argp);

if (FFlag::LuauPushErrorStackCheck)
lua_rawcheckstack(L, 1);

pusherror(L, result);
luaD_throw(L, LUA_ERRRUN);
}

void luaG_pusherror(lua_State* L, const char* error)
{
if (FFlag::LuauPushErrorStackCheck)
lua_rawcheckstack(L, 1);

pusherror(L, error);
}


@@ -53,6 +53,10 @@
* for each block size there's a page free list that contains pages that have at least one free block
* (global_State::freegcopages). This free list is used to make sure object allocation is O(1).
*
* When LUAU_ASSERTENABLED is enabled, all non-GCO pages are also linked in a list (global_State::allpages).
* Because this list is not strictly required for runtime operations, it is only tracked for the purposes of
* debugging. While overhead of linking those pages together is very small, unnecessary operations are avoided.
*
* Compared to GCOs, regular allocations have two important differences: they can be freed in isolation,
* and they don't start with a GC header. Because of this, each allocation is prefixed with block metadata,
* which contains the pointer to the page for allocated blocks, and the pointer to the next free block

@@ -190,6 +194,12 @@ const SizeClassConfig kSizeClassConfig;
#define metadata(block) (*(void**)(block))
#define freegcolink(block) (*(void**)((char*)block + kGCOLinkOffset))

#if defined(LUAU_ASSERTENABLED)
#define debugpageset(x) (x)
#else
#define debugpageset(x) NULL
#endif

struct lua_Page
{
// list of pages with free blocks

@@ -336,7 +346,7 @@ static void* newblock(lua_State* L, int sizeClass)
// slow path: no page in the freelist, allocate a new one
if (!page)
-page = newclasspage(L, g->freepages, NULL, sizeClass, true);
page = newclasspage(L, g->freepages, debugpageset(&g->allpages), sizeClass, true);

LUAU_ASSERT(!page->prev);
LUAU_ASSERT(page->freeList || page->freeNext >= 0);

@@ -457,7 +467,7 @@ static void freeblock(lua_State* L, int sizeClass, void* block)
// if it's the last block in the page, we don't need the page
if (page->busyBlocks == 0)
-freeclasspage(L, g->freepages, NULL, page, sizeClass);
freeclasspage(L, g->freepages, debugpageset(&g->allpages), page, sizeClass);
}

static void freegcoblock(lua_State* L, int sizeClass, void* block, lua_Page* page)


@@ -204,6 +204,7 @@ lua_State* lua_newstate(lua_Alloc f, void* ud)
g->freepages[i] = NULL;
g->freegcopages[i] = NULL;
}
g->allpages = NULL;
g->allgcopages = NULL;
g->sweepgcopage = NULL;
for (i = 0; i < LUA_T_COUNT; i++)


@@ -188,7 +188,8 @@ typedef struct global_State
struct lua_Page* freepages[LUA_SIZECLASSES]; // free page linked list for each size class for non-collectable objects
struct lua_Page* freegcopages[LUA_SIZECLASSES]; // free page linked list for each size class for collectable objects
-struct lua_Page* allgcopages; // page linked list with all pages for all classes
struct lua_Page* allpages; // page linked list with all pages for all non-collectable object classes (available with LUAU_ASSERTENABLED)
struct lua_Page* allgcopages; // page linked list with all pages for all collectable object classes
struct lua_Page* sweepgcopage; // position of the sweep in `allgcopages'

size_t memcatbytes[LUA_MEMORY_CATEGORIES]; // total amount of memory used by each memory category
View file
@ -12,6 +12,7 @@
#include "lvm.h" #include "lvm.h"
LUAU_DYNAMIC_FASTFLAGVARIABLE(LuauFastCrossTableMove, false) LUAU_DYNAMIC_FASTFLAGVARIABLE(LuauFastCrossTableMove, false)
LUAU_DYNAMIC_FASTFLAGVARIABLE(LuauFastTableMaxn, false)
LUAU_DYNAMIC_FASTFLAGVARIABLE(LuauFasterConcat, false) LUAU_DYNAMIC_FASTFLAGVARIABLE(LuauFasterConcat, false)
static int foreachi(lua_State* L) static int foreachi(lua_State* L)
@ -55,17 +56,45 @@ static int maxn(lua_State* L)
{ {
double max = 0; double max = 0;
luaL_checktype(L, 1, LUA_TTABLE); luaL_checktype(L, 1, LUA_TTABLE);
lua_pushnil(L); // first key
while (lua_next(L, 1)) if (DFFlag::LuauFastTableMaxn)
{ {
lua_pop(L, 1); // remove value Table* t = hvalue(L->base);
if (lua_type(L, -1) == LUA_TNUMBER)
for (int i = 0; i < t->sizearray; i++)
{ {
double v = lua_tonumber(L, -1); if (!ttisnil(&t->array[i]))
if (v > max) max = i + 1;
max = v; }
for (int i = 0; i < sizenode(t); i++)
{
LuaNode* n = gnode(t, i);
if (!ttisnil(gval(n)) && ttisnumber(gkey(n)))
{
double v = nvalue(gkey(n));
if (v > max)
max = v;
}
} }
} }
else
{
lua_pushnil(L); // first key
while (lua_next(L, 1))
{
lua_pop(L, 1); // remove value
if (lua_type(L, -1) == LUA_TNUMBER)
{
double v = lua_tonumber(L, -1);
if (v > max)
max = v;
}
}
}
lua_pushnumber(L, max); lua_pushnumber(L, max);
return 1; return 1;
} }
View file
@ -18,8 +18,12 @@ LUAU_FASTFLAG(LuauCompileTypeInfo)
LUAU_FASTFLAG(LuauLoadTypeInfo) LUAU_FASTFLAG(LuauLoadTypeInfo)
LUAU_FASTFLAG(LuauCodegenTypeInfo) LUAU_FASTFLAG(LuauCodegenTypeInfo)
LUAU_FASTFLAG(LuauTypeInfoLookupImprovement) LUAU_FASTFLAG(LuauTypeInfoLookupImprovement)
LUAU_FASTFLAG(LuauCodegenIrTypeNames)
LUAU_FASTFLAG(LuauCompileTempTypeInfo)
LUAU_FASTFLAG(LuauCodegenFixVectorFields)
LUAU_FASTFLAG(LuauCodegenVectorMispredictFix)
static std::string getCodegenAssembly(const char* source, bool includeIrTypes = false) static std::string getCodegenAssembly(const char* source, bool includeIrTypes = false, int debugLevel = 1)
{ {
Luau::CodeGen::AssemblyOptions options; Luau::CodeGen::AssemblyOptions options;
@ -47,7 +51,7 @@ static std::string getCodegenAssembly(const char* source, bool includeIrTypes =
Luau::CompileOptions copts = {}; Luau::CompileOptions copts = {};
copts.optimizationLevel = 2; copts.optimizationLevel = 2;
copts.debugLevel = 1; copts.debugLevel = debugLevel;
copts.typeInfoLevel = 1; copts.typeInfoLevel = 1;
copts.vectorCtor = "vector"; copts.vectorCtor = "vector";
copts.vectorType = "vector"; copts.vectorType = "vector";
@ -66,6 +70,20 @@ static std::string getCodegenAssembly(const char* source, bool includeIrTypes =
return ""; return "";
} }
static std::string getCodegenHeader(const char* source)
{
std::string assembly = getCodegenAssembly(source, /* includeIrTypes */ true, /* debugLevel */ 2);
auto bytecodeStart = assembly.find("bb_bytecode_0:");
if (bytecodeStart == std::string::npos)
bytecodeStart = assembly.find("bb_0:");
REQUIRE(bytecodeStart != std::string::npos);
return assembly.substr(0, bytecodeStart);
}
TEST_SUITE_BEGIN("IrLowering"); TEST_SUITE_BEGIN("IrLowering");
TEST_CASE("VectorReciprocal") TEST_CASE("VectorReciprocal")
@ -451,6 +469,50 @@ bb_bytecode_1:
)"); )");
} }
TEST_CASE("VectorRandomProp")
{
ScopedFastFlag luauCodegenRemoveDeadStores{FFlag::LuauCodegenRemoveDeadStores5, true};
ScopedFastFlag luauCodegenFixVectorFields{FFlag::LuauCodegenFixVectorFields, true};
ScopedFastFlag luauCodegenVectorMispredictFix{FFlag::LuauCodegenVectorMispredictFix, true};
CHECK_EQ("\n" + getCodegenAssembly(R"(
local function foo(a: vector)
return a.XX + a.YY + a.ZZ
end
)"),
R"(
; function foo($arg0) line 2
bb_0:
CHECK_TAG R0, tvector, exit(entry)
JUMP bb_2
bb_2:
JUMP bb_bytecode_1
bb_bytecode_1:
FALLBACK_GETTABLEKS 0u, R3, R0, K0
FALLBACK_GETTABLEKS 2u, R4, R0, K1
CHECK_TAG R3, tnumber, bb_fallback_3
CHECK_TAG R4, tnumber, bb_fallback_3
%14 = LOAD_DOUBLE R3
%16 = ADD_NUM %14, R4
STORE_DOUBLE R2, %16
STORE_TAG R2, tnumber
JUMP bb_4
bb_4:
CHECK_TAG R0, tvector, exit(5)
FALLBACK_GETTABLEKS 5u, R3, R0, K2
CHECK_TAG R2, tnumber, bb_fallback_5
CHECK_TAG R3, tnumber, bb_fallback_5
%30 = LOAD_DOUBLE R2
%32 = ADD_NUM %30, R3
STORE_DOUBLE R1, %32
STORE_TAG R1, tnumber
JUMP bb_6
bb_6:
INTERRUPT 8u
RETURN R1, 1i
)");
}
TEST_CASE("UserDataGetIndex") TEST_CASE("UserDataGetIndex")
{ {
ScopedFastFlag luauCodegenDirectUserdataFlow{FFlag::LuauCodegenDirectUserdataFlow, true}; ScopedFastFlag luauCodegenDirectUserdataFlow{FFlag::LuauCodegenDirectUserdataFlow, true};
@ -860,4 +922,422 @@ bb_bytecode_0:
)"); )");
} }
TEST_CASE("ResolveTablePathTypes")
{
ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauTypeInfoLookupImprovement, true}, {FFlag::LuauCodegenIrTypeNames, true},
{FFlag::LuauCompileTempTypeInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
type Vertex = {pos: vector, normal: vector}
local function foo(arr: {Vertex}, i)
local v = arr[i]
return v.pos.Y
end
)",
/* includeIrTypes */ true, /* debugLevel */ 2),
R"(
; function foo(arr, i) line 4
; R0: table [argument 'arr']
; R2: table from 0 to 6 [local 'v']
; R4: vector from 3 to 5
bb_0:
CHECK_TAG R0, ttable, exit(entry)
JUMP bb_2
bb_2:
JUMP bb_bytecode_1
bb_bytecode_1:
CHECK_TAG R1, tnumber, bb_fallback_3
%8 = LOAD_POINTER R0
%9 = LOAD_DOUBLE R1
%10 = TRY_NUM_TO_INDEX %9, bb_fallback_3
%11 = SUB_INT %10, 1i
CHECK_ARRAY_SIZE %8, %11, bb_fallback_3
CHECK_NO_METATABLE %8, bb_fallback_3
%14 = GET_ARR_ADDR %8, %11
%15 = LOAD_TVALUE %14
STORE_TVALUE R2, %15
JUMP bb_4
bb_4:
CHECK_TAG R2, ttable, exit(1)
%23 = LOAD_POINTER R2
%24 = GET_SLOT_NODE_ADDR %23, 1u, K0
CHECK_SLOT_MATCH %24, K0, bb_fallback_5
%26 = LOAD_TVALUE %24, 0i
STORE_TVALUE R4, %26
JUMP bb_6
bb_6:
CHECK_TAG R4, tvector, exit(3)
%33 = LOAD_FLOAT R4, 4i
STORE_DOUBLE R3, %33
STORE_TAG R3, tnumber
INTERRUPT 5u
RETURN R3, 1i
)");
}
TEST_CASE("ResolvableSimpleMath")
{
ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauTypeInfoLookupImprovement, true}, {FFlag::LuauCodegenIrTypeNames, true},
{FFlag::LuauCompileTempTypeInfo, true}};
CHECK_EQ("\n" + getCodegenHeader(R"(
type Vertex = { p: vector, uv: vector, n: vector, t: vector, b: vector, h: number }
local mesh: { vertices: {Vertex}, indices: {number} } = ...
local function compute()
for i = 1,#mesh.indices,3 do
local a = mesh.vertices[mesh.indices[i]]
local b = mesh.vertices[mesh.indices[i + 1]]
local c = mesh.vertices[mesh.indices[i + 2]]
local vba = b.p - a.p
local vca = c.p - a.p
local uvba = b.uv - a.uv
local uvca = c.uv - a.uv
local r = 1.0 / (uvba.X * uvca.Y - uvca.X * uvba.Y);
local sdir = (uvca.Y * vba - uvba.Y * vca) * r
a.t += sdir
end
end
)"),
R"(
; function compute() line 5
; U0: table ['mesh']
; R2: number from 0 to 78 [local 'i']
; R3: table from 7 to 78 [local 'a']
; R4: table from 15 to 78 [local 'b']
; R5: table from 24 to 78 [local 'c']
; R6: vector from 33 to 78 [local 'vba']
; R7: vector from 37 to 38
; R7: vector from 38 to 78 [local 'vca']
; R8: vector from 37 to 38
; R8: vector from 42 to 43
; R8: vector from 43 to 78 [local 'uvba']
; R9: vector from 42 to 43
; R9: vector from 47 to 48
; R9: vector from 48 to 78 [local 'uvca']
; R10: vector from 47 to 48
; R10: vector from 52 to 53
; R10: number from 53 to 78 [local 'r']
; R11: vector from 52 to 53
; R11: vector from 65 to 78 [local 'sdir']
; R12: vector from 72 to 73
; R12: vector from 75 to 76
; R13: vector from 71 to 72
; R14: vector from 71 to 72
)");
}
TEST_CASE("ResolveVectorNamecalls")
{
ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauTypeInfoLookupImprovement, true}, {FFlag::LuauCodegenIrTypeNames, true},
{FFlag::LuauCompileTempTypeInfo, true}, {FFlag::LuauCodegenDirectUserdataFlow, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
type Vertex = {pos: vector, normal: vector}
local function foo(arr: {Vertex}, i)
return arr[i].normal:Dot(vector(0.707, 0, 0.707))
end
)",
/* includeIrTypes */ true),
R"(
; function foo($arg0, $arg1) line 4
; R0: table [argument]
; R2: vector from 4 to 6
bb_0:
CHECK_TAG R0, ttable, exit(entry)
JUMP bb_2
bb_2:
JUMP bb_bytecode_1
bb_bytecode_1:
CHECK_TAG R1, tnumber, bb_fallback_3
%8 = LOAD_POINTER R0
%9 = LOAD_DOUBLE R1
%10 = TRY_NUM_TO_INDEX %9, bb_fallback_3
%11 = SUB_INT %10, 1i
CHECK_ARRAY_SIZE %8, %11, bb_fallback_3
CHECK_NO_METATABLE %8, bb_fallback_3
%14 = GET_ARR_ADDR %8, %11
%15 = LOAD_TVALUE %14
STORE_TVALUE R3, %15
JUMP bb_4
bb_4:
CHECK_TAG R3, ttable, bb_fallback_5
%23 = LOAD_POINTER R3
%24 = GET_SLOT_NODE_ADDR %23, 1u, K0
CHECK_SLOT_MATCH %24, K0, bb_fallback_5
%26 = LOAD_TVALUE %24, 0i
STORE_TVALUE R2, %26
JUMP bb_6
bb_6:
%31 = LOAD_TVALUE K1, 0i, tvector
STORE_TVALUE R4, %31
CHECK_TAG R2, tvector, exit(4)
FALLBACK_NAMECALL 4u, R2, R2, K2
INTERRUPT 6u
SET_SAVEDPC 7u
CALL R2, 2i, -1i
INTERRUPT 7u
RETURN R2, -1i
)");
}
TEST_CASE("ImmediateTypeAnnotationHelp")
{
ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauTypeInfoLookupImprovement, true}, {FFlag::LuauCodegenIrTypeNames, true},
{FFlag::LuauCompileTempTypeInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
local function foo(arr, i)
return (arr[i] :: vector) / 5
end
)",
/* includeIrTypes */ true),
R"(
; function foo($arg0, $arg1) line 2
; R3: vector from 1 to 2
bb_bytecode_0:
CHECK_TAG R0, ttable, bb_fallback_1
CHECK_TAG R1, tnumber, bb_fallback_1
%4 = LOAD_POINTER R0
%5 = LOAD_DOUBLE R1
%6 = TRY_NUM_TO_INDEX %5, bb_fallback_1
%7 = SUB_INT %6, 1i
CHECK_ARRAY_SIZE %4, %7, bb_fallback_1
CHECK_NO_METATABLE %4, bb_fallback_1
%10 = GET_ARR_ADDR %4, %7
%11 = LOAD_TVALUE %10
STORE_TVALUE R3, %11
JUMP bb_2
bb_2:
CHECK_TAG R3, tvector, exit(1)
%19 = LOAD_TVALUE R3
%20 = NUM_TO_VEC 5
%21 = DIV_VEC %19, %20
%22 = TAG_VECTOR %21
STORE_TVALUE R2, %22
INTERRUPT 2u
RETURN R2, 1i
)");
}
TEST_CASE("UnaryTypeResolve")
{
ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauTypeInfoLookupImprovement, true}, {FFlag::LuauCodegenIrTypeNames, true},
{FFlag::LuauCompileTempTypeInfo, true}};
CHECK_EQ("\n" + getCodegenHeader(R"(
local function foo(a, b: vector, c)
local d = not a
local e = -b
local f = #c
return (if d then e else vector(f, 2, 3)).X
end
)"),
R"(
; function foo(a, b, c) line 2
; R1: vector [argument 'b']
; R3: boolean from 0 to 16 [local 'd']
; R4: vector from 1 to 16 [local 'e']
; R5: number from 2 to 16 [local 'f']
; R7: vector from 13 to 15
)");
}
TEST_CASE("ForInManualAnnotation")
{
ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauTypeInfoLookupImprovement, true}, {FFlag::LuauCodegenIrTypeNames, true},
{FFlag::LuauCompileTempTypeInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
type Vertex = {pos: vector, normal: vector}
local function foo(a: {Vertex})
local sum = 0
for k, v: Vertex in ipairs(a) do
sum += v.pos.X
end
return sum
end
)",
/* includeIrTypes */ true, /* debugLevel */ 2),
R"(
; function foo(a) line 4
; R0: table [argument 'a']
; R1: number from 0 to 14 [local 'sum']
; R5: number from 5 to 11 [local 'k']
; R6: table from 5 to 11 [local 'v']
; R8: vector from 8 to 10
bb_0:
CHECK_TAG R0, ttable, exit(entry)
JUMP bb_4
bb_4:
JUMP bb_bytecode_1
bb_bytecode_1:
STORE_DOUBLE R1, 0
STORE_TAG R1, tnumber
CHECK_SAFE_ENV exit(1)
JUMP_EQ_TAG K1, tnil, bb_fallback_6, bb_5
bb_5:
%9 = LOAD_TVALUE K1
STORE_TVALUE R2, %9
JUMP bb_7
bb_7:
%15 = LOAD_TVALUE R0
STORE_TVALUE R3, %15
INTERRUPT 4u
SET_SAVEDPC 5u
CALL R2, 1i, 3i
CHECK_SAFE_ENV exit(5)
CHECK_TAG R3, ttable, bb_fallback_8
CHECK_TAG R4, tnumber, bb_fallback_8
JUMP_CMP_NUM R4, 0, not_eq, bb_fallback_8, bb_9
bb_9:
STORE_TAG R2, tnil
STORE_POINTER R4, 0i
STORE_EXTRA R4, 128i
STORE_TAG R4, tlightuserdata
JUMP bb_bytecode_3
bb_bytecode_2:
CHECK_TAG R6, ttable, exit(6)
%35 = LOAD_POINTER R6
%36 = GET_SLOT_NODE_ADDR %35, 6u, K2
CHECK_SLOT_MATCH %36, K2, bb_fallback_10
%38 = LOAD_TVALUE %36, 0i
STORE_TVALUE R8, %38
JUMP bb_11
bb_11:
CHECK_TAG R8, tvector, exit(8)
%45 = LOAD_FLOAT R8, 0i
STORE_DOUBLE R7, %45
STORE_TAG R7, tnumber
CHECK_TAG R1, tnumber, exit(10)
%52 = LOAD_DOUBLE R1
%54 = ADD_NUM %52, %45
STORE_DOUBLE R1, %54
JUMP bb_bytecode_3
bb_bytecode_3:
INTERRUPT 11u
CHECK_TAG R2, tnil, bb_fallback_13
%60 = LOAD_POINTER R3
%61 = LOAD_INT R4
%62 = GET_ARR_ADDR %60, %61
CHECK_ARRAY_SIZE %60, %61, bb_12
%64 = LOAD_TAG %62
JUMP_EQ_TAG %64, tnil, bb_12, bb_14
bb_14:
%66 = ADD_INT %61, 1i
STORE_INT R4, %66
%68 = INT_TO_NUM %66
STORE_DOUBLE R5, %68
STORE_TAG R5, tnumber
%71 = LOAD_TVALUE %62
STORE_TVALUE R6, %71
JUMP bb_bytecode_2
bb_12:
INTERRUPT 13u
RETURN R1, 1i
)");
}
TEST_CASE("ForInAutoAnnotationIpairs")
{
ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauTypeInfoLookupImprovement, true}, {FFlag::LuauCodegenIrTypeNames, true},
{FFlag::LuauCompileTempTypeInfo, true}};
CHECK_EQ("\n" + getCodegenHeader(R"(
type Vertex = {pos: vector, normal: vector}
local function foo(a: {Vertex})
local sum = 0
for k, v in ipairs(a) do
local n = v.pos.X
sum += n
end
return sum
end
)"),
R"(
; function foo(a) line 4
; R0: table [argument 'a']
; R1: number from 0 to 14 [local 'sum']
; R5: number from 5 to 11 [local 'k']
; R6: table from 5 to 11 [local 'v']
; R7: number from 6 to 11 [local 'n']
; R8: vector from 8 to 10
)");
}
TEST_CASE("ForInAutoAnnotationPairs")
{
ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauTypeInfoLookupImprovement, true}, {FFlag::LuauCodegenIrTypeNames, true},
{FFlag::LuauCompileTempTypeInfo, true}};
CHECK_EQ("\n" + getCodegenHeader(R"(
type Vertex = {pos: vector, normal: vector}
local function foo(a: {[string]: Vertex})
local sum = 0
for k, v in pairs(a) do
local n = v.pos.X
sum += n
end
return sum
end
)"),
R"(
; function foo(a) line 4
; R0: table [argument 'a']
; R1: number from 0 to 14 [local 'sum']
; R5: string from 5 to 11 [local 'k']
; R6: table from 5 to 11 [local 'v']
; R7: number from 6 to 11 [local 'n']
; R8: vector from 8 to 10
)");
}
TEST_CASE("ForInAutoAnnotationGeneric")
{
ScopedFastFlag sffs[]{{FFlag::LuauLoadTypeInfo, true}, {FFlag::LuauCompileTypeInfo, true}, {FFlag::LuauCodegenTypeInfo, true},
{FFlag::LuauCodegenRemoveDeadStores5, true}, {FFlag::LuauTypeInfoLookupImprovement, true}, {FFlag::LuauCodegenIrTypeNames, true},
{FFlag::LuauCompileTempTypeInfo, true}};
CHECK_EQ("\n" + getCodegenHeader(R"(
type Vertex = {pos: vector, normal: vector}
local function foo(a: {Vertex})
local sum = 0
for k, v in a do
local n = v.pos.X
sum += n
end
return sum
end
)"),
R"(
; function foo(a) line 4
; R0: table [argument 'a']
; R1: number from 0 to 13 [local 'sum']
; R5: number from 4 to 10 [local 'k']
; R6: table from 4 to 10 [local 'v']
; R7: number from 5 to 10 [local 'n']
; R8: vector from 7 to 9
)");
}
TEST_SUITE_END(); TEST_SUITE_END();
View file
@ -14,7 +14,7 @@ LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
LUAU_FASTFLAG(LuauFixNormalizeCaching) LUAU_FASTFLAG(LuauFixNormalizeCaching)
LUAU_FASTFLAG(LuauNormalizeNotUnknownIntersection) LUAU_FASTFLAG(LuauNormalizeNotUnknownIntersection)
LUAU_FASTFLAG(LuauFixCyclicUnionsOfIntersections); LUAU_FASTFLAG(LuauFixCyclicUnionsOfIntersections);
LUAU_FASTINT(LuauTypeInferRecursionLimit)
using namespace Luau; using namespace Luau;
namespace namespace
@ -962,4 +962,32 @@ TEST_CASE_FIXTURE(NormalizeFixture, "intersect_with_not_unknown")
CHECK("never" == toString(normalizer.typeFromNormal(*normalized.get()))); CHECK("never" == toString(normalizer.typeFromNormal(*normalized.get())));
} }
TEST_CASE_FIXTURE(NormalizeFixture, "cyclic_stack_overflow_1")
{
ScopedFastInt sfi{FInt::LuauTypeInferRecursionLimit, 165};
this->unifierState.counters.recursionLimit = FInt::LuauTypeInferRecursionLimit;
TypeId t1 = arena.addType(TableType{});
TypeId t2 = arena.addType(TableType{});
TypeId t3 = arena.addType(IntersectionType{{t1, t2}});
asMutable(t1)->ty.get_if<TableType>()->props = {{"foo", Property::readonly(t2)}};
asMutable(t2)->ty.get_if<TableType>()->props = {{"foo", Property::readonly(t1)}};
std::shared_ptr<const NormalizedType> normalized = normalizer.normalize(t3);
CHECK(normalized);
}
TEST_CASE_FIXTURE(NormalizeFixture, "cyclic_stack_overflow_2")
{
ScopedFastInt sfi{FInt::LuauTypeInferRecursionLimit, 165};
this->unifierState.counters.recursionLimit = FInt::LuauTypeInferRecursionLimit;
TypeId t1 = arena.addType(TableType{});
TypeId t2 = arena.addType(TableType{});
TypeId t3 = arena.addType(IntersectionType{{t1, t2}});
asMutable(t1)->ty.get_if<TableType>()->props = {{"foo", Property::readonly(t3)}};
asMutable(t2)->ty.get_if<TableType>()->props = {{"foo", Property::readonly(t1)}};
std::shared_ptr<const NormalizedType> normalized = normalizer.normalize(t3);
CHECK(normalized);
}
TEST_SUITE_END(); TEST_SUITE_END();
View file
@ -17,6 +17,39 @@ LUAU_FASTFLAG(LuauAlwaysCommitInferencesOfFunctionCalls);
TEST_SUITE_BEGIN("TypeInferClasses"); TEST_SUITE_BEGIN("TypeInferClasses");
TEST_CASE_FIXTURE(ClassFixture, "Luau.Analyze.CLI_crashes_on_this_test")
{
CheckResult result = check(R"(
local CircularQueue = {}
CircularQueue.__index = CircularQueue
function CircularQueue:new()
local newCircularQueue = {
head = nil,
}
setmetatable(newCircularQueue, CircularQueue)
return newCircularQueue
end
function CircularQueue:push()
local newListNode
if self.head then
newListNode = {
prevNode = self.head.prevNode,
nextNode = self.head,
}
newListNode.prevNode.nextNode = newListNode
newListNode.nextNode.prevNode = newListNode
end
end
return CircularQueue
)");
}
TEST_CASE_FIXTURE(ClassFixture, "call_method_of_a_class") TEST_CASE_FIXTURE(ClassFixture, "call_method_of_a_class")
{ {
CheckResult result = check(R"( CheckResult result = check(R"(
View file
@ -4423,4 +4423,20 @@ TEST_CASE_FIXTURE(Fixture, "setindexer_multiple_tables_intersection")
CHECK("({ [string]: number } & { [thread]: boolean }, boolean | number) -> ()" == toString(requireType("f"))); CHECK("({ [string]: number } & { [thread]: boolean }, boolean | number) -> ()" == toString(requireType("f")));
} }
TEST_CASE_FIXTURE(Fixture, "insert_a_and_f_of_a_into_table_res_in_a_loop")
{
CheckResult result = check(R"(
local function f(t)
local res = {}
for k, a in t do
res[k] = f(a)
res[k] = a
end
end
)");
LUAU_REQUIRE_NO_ERRORS(result);
}
TEST_SUITE_END(); TEST_SUITE_END();
View file
@ -782,7 +782,10 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "no_heap_use_after_free_error")
end end
)"); )");
LUAU_REQUIRE_ERRORS(result); if (FFlag::DebugLuauDeferredConstraintResolution)
LUAU_REQUIRE_NO_ERRORS(result);
else
LUAU_REQUIRE_ERRORS(result);
} }
TEST_CASE_FIXTURE(Fixture, "infer_type_assertion_value_type") TEST_CASE_FIXTURE(Fixture, "infer_type_assertion_value_type")
@ -1540,19 +1543,33 @@ TEST_CASE_FIXTURE(Fixture, "typeof_cannot_refine_builtin_alias")
)"); )");
} }
/* TEST_CASE_FIXTURE(BuiltinsFixture, "bad_iter_metamethod")
* We had an issue where we tripped the canMutate() check when binding one
* blocked type to another.
*/
TEST_CASE_FIXTURE(Fixture, "delay_setIndexer_constraint_if_the_indexers_type_is_blocked")
{ {
(void) check(R"( CheckResult result = check(R"(
local SG = GetService(true) function iter(): unknown
local lines: { [string]: typeof(SG.ScreenGui) } = {} return nil
lines[deadline] = nil -- This line end
local a = {__iter = iter}
setmetatable(a, a)
for i in a do
end
)"); )");
// As long as type inference doesn't trip an assert or crash, we're good! if (FFlag::DebugLuauDeferredConstraintResolution)
{
LUAU_REQUIRE_ERROR_COUNT(1, result);
CannotCallNonFunction* ccnf = get<CannotCallNonFunction>(result.errors[0]);
REQUIRE(ccnf);
CHECK("unknown" == toString(ccnf->ty));
}
else
{
LUAU_REQUIRE_NO_ERRORS(result);
}
} }
TEST_SUITE_END(); TEST_SUITE_END();
View file
@ -490,5 +490,34 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "typestates_do_not_apply_to_the_initial_local
CHECK("number" == toString(requireTypeAtPosition({5, 14}), {true})); CHECK("number" == toString(requireTypeAtPosition({5, 14}), {true}));
} }
TEST_CASE_FIXTURE(Fixture, "typestate_globals")
{
ScopedFastFlag sff{FFlag::DebugLuauDeferredConstraintResolution, true};
loadDefinition(R"(
declare foo: string | number
declare function f(x: string): ()
)");
CheckResult result = check(R"(
foo = "a"
f(foo)
)");
LUAU_REQUIRE_NO_ERRORS(result);
}
TEST_CASE_FIXTURE(Fixture, "typestate_unknown_global")
{
ScopedFastFlag sff{FFlag::DebugLuauDeferredConstraintResolution, true};
CheckResult result = check(R"(
x = 5
)");
LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK(get<UnknownSymbol>(result.errors[0]));
}
TEST_SUITE_END(); TEST_SUITE_END();
View file
@ -306,10 +306,14 @@ end
assert(table.maxn{} == 0) assert(table.maxn{} == 0)
assert(table.maxn{[-100] = 1} == 0)
assert(table.maxn{["1000"] = true} == 0) assert(table.maxn{["1000"] = true} == 0)
assert(table.maxn{["1000"] = true, [24.5] = 3} == 24.5) assert(table.maxn{["1000"] = true, [24.5] = 3} == 24.5)
assert(table.maxn{[1000] = true} == 1000) assert(table.maxn{[1000] = true} == 1000)
assert(table.maxn{[10] = true, [100*math.pi] = print} == 100*math.pi) assert(table.maxn{[10] = true, [100*math.pi] = print} == 100*math.pi)
a = {[10] = 1, [20] = 2}
a[20] = nil
assert(table.maxn(a) == 10)
-- int overflow -- int overflow
View file
@ -37,7 +37,6 @@ Differ.metatable_metamissing_left
Differ.metatable_metamissing_right Differ.metatable_metamissing_right
Differ.metatable_metanormal Differ.metatable_metanormal
Differ.negation Differ.negation
FrontendTest.accumulate_cached_errors_in_consistent_order
FrontendTest.environments FrontendTest.environments
FrontendTest.imported_table_modification_2 FrontendTest.imported_table_modification_2
FrontendTest.it_should_be_safe_to_stringify_errors_when_full_type_graph_is_discarded FrontendTest.it_should_be_safe_to_stringify_errors_when_full_type_graph_is_discarded
@ -182,6 +181,7 @@ TableTests.infer_array
TableTests.infer_indexer_from_array_like_table TableTests.infer_indexer_from_array_like_table
TableTests.infer_indexer_from_its_variable_type_and_unifiable TableTests.infer_indexer_from_its_variable_type_and_unifiable
TableTests.inferred_return_type_of_free_table TableTests.inferred_return_type_of_free_table
TableTests.insert_a_and_f_of_a_into_table_res_in_a_loop
TableTests.invariant_table_properties_means_instantiating_tables_in_assignment_is_unsound TableTests.invariant_table_properties_means_instantiating_tables_in_assignment_is_unsound
TableTests.invariant_table_properties_means_instantiating_tables_in_call_is_unsound TableTests.invariant_table_properties_means_instantiating_tables_in_call_is_unsound
TableTests.length_operator_union TableTests.length_operator_union
@ -269,7 +269,6 @@ TypeInfer.dont_report_type_errors_within_an_AstExprError
TypeInfer.dont_report_type_errors_within_an_AstStatError TypeInfer.dont_report_type_errors_within_an_AstStatError
TypeInfer.globals TypeInfer.globals
TypeInfer.globals2 TypeInfer.globals2
TypeInfer.globals_are_banned_in_strict_mode
TypeInfer.infer_through_group_expr TypeInfer.infer_through_group_expr
TypeInfer.no_stack_overflow_from_isoptional TypeInfer.no_stack_overflow_from_isoptional
TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter
@ -366,7 +365,6 @@ TypeInferLoops.properly_infer_iteratee_is_a_free_table
TypeInferLoops.repeat_loop TypeInferLoops.repeat_loop
TypeInferLoops.varlist_declared_by_for_in_loop_should_be_free TypeInferLoops.varlist_declared_by_for_in_loop_should_be_free
TypeInferLoops.while_loop TypeInferLoops.while_loop
TypeInferModules.custom_require_global
TypeInferModules.do_not_modify_imported_types_5 TypeInferModules.do_not_modify_imported_types_5
TypeInferModules.require TypeInferModules.require
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_it_wont_help_2 TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_it_wont_help_2

tools/stackdbg.py (new file, 94 lines)
View file
@ -0,0 +1,94 @@
#!/usr/bin/python3
"""
To use this command, run:
`command script import /path/to/your/game-engine/Client/Luau/tools/stackdbg.py`
in the `lldb` interpreter. You can also add it to your .lldbinit file to have it be
automatically imported.
If using vscode, you can add the above command to your launch.json under `preRunCommands` for the appropriate target. For example:
{
"name": "Luau.UnitTest",
"type": "lldb",
"request": "launch",
"program": "${workspaceFolder}/build/ninja/common-tests/noopt/Luau/Luau.UnitTest",
"preRunCommands": [
"command script import ${workspaceFolder}/Client/Luau/tools/stackdbg.py"
],
}
Once this is loaded,
`(lldb) help stack`
or
`(lldb) stack -h`
or
`(lldb) stack --help`
can get you started
"""
import lldb
import functools
import argparse
import shlex
# Dumps the collected frame data
def dump(collected):
for (frame_name, size_in_kb, live_size_kb, variables) in collected:
print(f'{frame_name}, locals: {size_in_kb}kb, fp-sp: {live_size_kb}kb')
for (var_name, var_size, variable_obj) in variables:
print(f' {var_name}, {var_size} bytes')
def dbg_stack_pressure(frame, frames_to_show = 5, sort_frames = False, vars_to_show = 5, sort_vars = True):
totalKb = 0
collect = []
for f in frame.thread:
frame_name = f.GetFunctionName()
variables = [ (v.GetName(), v.GetByteSize(), v) for v in f.get_locals() ]
if sort_vars:
variables.sort(key = lambda x: x[1], reverse = True)
size_in_kb = functools.reduce(lambda x,y : x + y[1], variables, 0) / 1024
fp = f.GetFP()
sp = f.GetSP()
live_size_kb = round((fp - sp) / 1024, 2)
size_in_kb = round(size_in_kb, 2)
totalKb += size_in_kb
collect.append((frame_name, size_in_kb, live_size_kb, variables[:vars_to_show]))
if sort_frames:
collect.sort(key = lambda x: x[1], reverse = True)
print("******************** Report Stack Usage ********************")
totalMb = round(totalKb / 1024, 2)
print(f'{len(frame.thread)} stack frames used {totalMb}MB')
dump(collect[:frames_to_show])
def stack(debugger, command, result, internal_dict):
"""
usage: [-h] [-f FRAMES] [-fd] [-v VARS] [-vd]
optional arguments:
-h, --help show this help message and exit
-f FRAMES, --frames FRAMES
How many stack frames to display
-fd, --sort_frames Sort frames
-v VARS, --vars VARS How many variables per frame to display
-vd, --sort_vars Sort locals
"""
frame = debugger.GetSelectedTarget().GetProcess().GetSelectedThread().GetSelectedFrame()
args = shlex.split(command)
argparser = argparse.ArgumentParser(allow_abbrev = True)
argparser.add_argument("-f", "--frames", required=False, help="How many stack frames to display", default=5, type=int)
argparser.add_argument("-fd", "--sort_frames", required=False, help="Sort frames in descending order of stack usage", action="store_true", default=False)
argparser.add_argument("-v", "--vars", required=False, help="How many variables per frame to display", default=5, type=int)
argparser.add_argument("-vd", "--sort_vars", required=False, help="Sort locals in descending order of stack usage ", action="store_true", default=False)
args = argparser.parse_args(args)
dbg_stack_pressure(frame, frames_to_show=args.frames, sort_frames=args.sort_frames, vars_to_show=args.vars, sort_vars=args.sort_vars)
# Initialization code to add commands
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f stackdbg.stack stack')
print("The 'stack' python command has been installed and is ready for use.")