// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "EmitCommonX64.h"

#include "Luau/AssemblyBuilderX64.h"
#include "Luau/IrCallWrapperX64.h"
#include "Luau/IrData.h"
#include "Luau/IrRegAllocX64.h"

#include "CustomExecUtils.h"
#include "NativeState.h"

#include "lgc.h"
#include "lstate.h"

namespace Luau
{
namespace CodeGen
{
namespace X64
{

void jumpOnNumberCmp(AssemblyBuilderX64& build, RegisterX64 tmp, OperandX64 lhs, OperandX64 rhs, IrCondition cond, Label& label)
{
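    // Compares two doubles and jumps to 'label' when 'cond' holds, with the NaN
    // semantics spelled out in the comments below
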
    // Refresher on comi/ucomi EFLAGS:
    // CF only: less
    // ZF only: equal
    // PF+CF+ZF: unordered (NaN)

    if (rhs.cat == CategoryX64::reg)
    {
        build.vucomisd(rhs, lhs);
    }
    else
    {
        build.vmovsd(tmp, rhs);
        build.vucomisd(tmp, lhs);
    }
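
    // Note: vucomisd is emitted with its operands swapped (rhs is compared against
    // lhs), so the flags read as "rhs vs lhs": 'Above' means lhs < rhs and
    // 'AboveEqual' means lhs <= rhs, which is what the cases below rely on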

    // Keep in mind that 'Not' conditions want 'true' for comparisons with NaN
    // And because of NaN, the equivalences that hold for integer comparisons, like 'not less or equal' <-> 'greater', do not hold here
    switch (cond)
    {
    case IrCondition::NotLessEqual:
        // (b < a) is the same as !(a <= b). jnae checks CF=1 which means < or NaN
        build.jcc(ConditionX64::NotAboveEqual, label);
        break;
    case IrCondition::LessEqual:
        // (b >= a) is the same as (a <= b). jae checks CF=0 which means >= and not NaN
        build.jcc(ConditionX64::AboveEqual, label);
        break;
    case IrCondition::NotLess:
        // (b <= a) is the same as !(a < b). jna checks CF=1 or ZF=1 which means <= or NaN
        build.jcc(ConditionX64::NotAbove, label);
        break;
    case IrCondition::Less:
        // (b > a) is the same as (a < b). ja checks CF=0 and ZF=0 which means > and not NaN
        build.jcc(ConditionX64::Above, label);
        break;
    case IrCondition::NotEqual:
        // ZF=0 or PF=1 means != or NaN
        build.jcc(ConditionX64::NotZero, label);
        build.jcc(ConditionX64::Parity, label);
        break;
    default:
        LUAU_ASSERT(!"Unsupported condition");
    }
}

void jumpOnAnyCmpFallback(IrRegAllocX64& regs, AssemblyBuilderX64& build, int ra, int rb, IrCondition cond, Label& label)
{
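    // Generic comparison path: calls the VM helpers (which may invoke metamethods
    // and can reallocate the stack), then branches on the int result they return in eax
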
    IrCallWrapperX64 callWrap(regs, build);
    callWrap.addArgument(SizeX64::qword, rState);
    callWrap.addArgument(SizeX64::qword, luauRegAddress(ra));
    callWrap.addArgument(SizeX64::qword, luauRegAddress(rb));

    if (cond == IrCondition::NotLessEqual || cond == IrCondition::LessEqual)
        callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaV_lessequal)]);
    else if (cond == IrCondition::NotLess || cond == IrCondition::Less)
        callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaV_lessthan)]);
    else if (cond == IrCondition::NotEqual || cond == IrCondition::Equal)
        callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaV_equalval)]);
    else
        LUAU_ASSERT(!"Unsupported condition");

    emitUpdateBase(build);
    build.test(eax, eax);
    build.jcc(cond == IrCondition::NotLessEqual || cond == IrCondition::NotLess || cond == IrCondition::NotEqual ? ConditionX64::Zero
                                                                                                                 : ConditionX64::NotZero,
        label);
}

void getTableNodeAtCachedSlot(AssemblyBuilderX64& build, RegisterX64 tmp, RegisterX64 node, RegisterX64 table, int pcpos)
{
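    // The C operand byte of the instruction at 'pcpos' caches a predicted hash
    // slot for this lookup site; masking it with the table's nodemask8 keeps the
    // index in range for the current node array
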
    LUAU_ASSERT(tmp != node);
    LUAU_ASSERT(table != node);

    build.mov(node, qword[table + offsetof(Table, node)]);

    // compute cached slot
    build.mov(tmp, sCode);
    build.movzx(dwordReg(tmp), byte[tmp + pcpos * sizeof(Instruction) + kOffsetOfInstructionC]);
    build.and_(byteReg(tmp), byte[table + offsetof(Table, nodemask8)]);

    // LuaNode* n = &h->node[slot];
    build.shl(dwordReg(tmp), kLuaNodeSizeLog2);
    build.add(node, tmp);
}

void convertNumberToIndexOrJump(AssemblyBuilderX64& build, RegisterX64 tmp, RegisterX64 numd, RegisterX64 numi, Label& label)
{
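    // Round-trip trick: truncate the double to int32 and convert back; if the
    // value survives unchanged it was an exact integer index, otherwise jump
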
    LUAU_ASSERT(numi.size == SizeX64::dword);

    // Convert to integer, NaN is converted into 0x80000000
    build.vcvttsd2si(numi, numd);

    // Convert that integer back to double
    build.vcvtsi2sd(tmp, numd, numi);

    build.vucomisd(tmp, numd); // Sets ZF=1 if equal or NaN
    // We don't need non-integer values
    // But to skip the PF=1 check, we proceed with NaN because 0x80000000 index is out of bounds
    build.jcc(ConditionX64::NotZero, label);
}

void callArithHelper(IrRegAllocX64& regs, AssemblyBuilderX64& build, int ra, int rb, OperandX64 c, TMS tm)
{
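    // Generic arithmetic fallback: luaV_doarith handles non-number operands,
    // including metamethod dispatch, with 'tm' selecting the arithmetic event
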
    IrCallWrapperX64 callWrap(regs, build);
    callWrap.addArgument(SizeX64::qword, rState);
    callWrap.addArgument(SizeX64::qword, luauRegAddress(ra));
    callWrap.addArgument(SizeX64::qword, luauRegAddress(rb));
    callWrap.addArgument(SizeX64::qword, c);
    callWrap.addArgument(SizeX64::dword, tm);
    callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaV_doarith)]);

    emitUpdateBase(build);
}

void callLengthHelper(IrRegAllocX64& regs, AssemblyBuilderX64& build, int ra, int rb)
{
    IrCallWrapperX64 callWrap(regs, build);
    callWrap.addArgument(SizeX64::qword, rState);
    callWrap.addArgument(SizeX64::qword, luauRegAddress(ra));
    callWrap.addArgument(SizeX64::qword, luauRegAddress(rb));
    callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaV_dolen)]);

    emitUpdateBase(build);
}

void callPrepareForN(IrRegAllocX64& regs, AssemblyBuilderX64& build, int limit, int step, int init)
{
    IrCallWrapperX64 callWrap(regs, build);
    callWrap.addArgument(SizeX64::qword, rState);
    callWrap.addArgument(SizeX64::qword, luauRegAddress(limit));
    callWrap.addArgument(SizeX64::qword, luauRegAddress(step));
    callWrap.addArgument(SizeX64::qword, luauRegAddress(init));
    callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaV_prepareFORN)]);
}

void callGetTable(IrRegAllocX64& regs, AssemblyBuilderX64& build, int rb, OperandX64 c, int ra)
{
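    // Generic table access: luaV_gettable (and luaV_settable below) walk the
    // __index/__newindex chain when the fast path can't be used
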
    IrCallWrapperX64 callWrap(regs, build);
    callWrap.addArgument(SizeX64::qword, rState);
    callWrap.addArgument(SizeX64::qword, luauRegAddress(rb));
    callWrap.addArgument(SizeX64::qword, c);
    callWrap.addArgument(SizeX64::qword, luauRegAddress(ra));
    callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaV_gettable)]);

    emitUpdateBase(build);
}

void callSetTable(IrRegAllocX64& regs, AssemblyBuilderX64& build, int rb, OperandX64 c, int ra)
{
    IrCallWrapperX64 callWrap(regs, build);
    callWrap.addArgument(SizeX64::qword, rState);
    callWrap.addArgument(SizeX64::qword, luauRegAddress(rb));
    callWrap.addArgument(SizeX64::qword, c);
    callWrap.addArgument(SizeX64::qword, luauRegAddress(ra));
    callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaV_settable)]);

    emitUpdateBase(build);
}

void checkObjectBarrierConditions(AssemblyBuilderX64& build, RegisterX64 tmp, RegisterX64 object, int ra, Label& skip)
{
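    // A write barrier is only needed when a black (fully marked) object ends up
    // referencing a white (unmarked) collectable value; each check below can skip it
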
    // iscollectable(ra): tag values of collectable types start at LUA_TSTRING
    build.cmp(luauRegTag(ra), LUA_TSTRING);
    build.jcc(ConditionX64::Less, skip);

    // isblack(obj2gco(o))
    build.test(byte[object + offsetof(GCheader, marked)], bitmask(BLACKBIT));
    build.jcc(ConditionX64::Zero, skip);

    // iswhite(gcvalue(ra))
    build.mov(tmp, luauRegValue(ra));
    build.test(byte[tmp + offsetof(GCheader, marked)], bit2mask(WHITE0BIT, WHITE1BIT));
    build.jcc(ConditionX64::Zero, skip);
}

void callBarrierObject(IrRegAllocX64& regs, AssemblyBuilderX64& build, RegisterX64 object, IrOp objectOp, int ra)
{
    Label skip;

    ScopedRegX64 tmp{regs, SizeX64::qword};
    checkObjectBarrierConditions(build, tmp.reg, object, ra, skip);

    {
        ScopedSpills spillGuard(regs);

        IrCallWrapperX64 callWrap(regs, build);
        callWrap.addArgument(SizeX64::qword, rState);
        callWrap.addArgument(SizeX64::qword, object, objectOp);
        callWrap.addArgument(SizeX64::qword, tmp);
        callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaC_barrierf)]);
    }

    build.setLabel(skip);
}

void callBarrierTableFast(IrRegAllocX64& regs, AssemblyBuilderX64& build, RegisterX64 table, IrOp tableOp)
{
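    // Tables use a backward barrier: instead of marking the stored value, the
    // table itself is re-queued for rescan via luaC_barrierback
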
    Label skip;

    // isblack(obj2gco(t))
    build.test(byte[table + offsetof(GCheader, marked)], bitmask(BLACKBIT));
    build.jcc(ConditionX64::Zero, skip);

    {
        ScopedSpills spillGuard(regs);

        IrCallWrapperX64 callWrap(regs, build);
        callWrap.addArgument(SizeX64::qword, rState);
        callWrap.addArgument(SizeX64::qword, table, tableOp);
        callWrap.addArgument(SizeX64::qword, addr[table + offsetof(Table, gclist)]);
        callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaC_barrierback)]);
    }

    build.setLabel(skip);
}

void callStepGc(IrRegAllocX64& regs, AssemblyBuilderX64& build)
{
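    // GC assist point: only pays for a call into luaC_step once the allocated
    // total has crossed the current GC threshold
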
    Label skip;

    {
        ScopedRegX64 tmp1{regs, SizeX64::qword};
        ScopedRegX64 tmp2{regs, SizeX64::qword};

        build.mov(tmp1.reg, qword[rState + offsetof(lua_State, global)]);
        build.mov(tmp2.reg, qword[tmp1.reg + offsetof(global_State, totalbytes)]);
        build.cmp(tmp2.reg, qword[tmp1.reg + offsetof(global_State, GCthreshold)]);
        build.jcc(ConditionX64::Below, skip);
    }

    {
        ScopedSpills spillGuard(regs);

        IrCallWrapperX64 callWrap(regs, build);
        callWrap.addArgument(SizeX64::qword, rState);
        callWrap.addArgument(SizeX64::dword, 1);
        callWrap.call(qword[rNativeContext + offsetof(NativeContext, luaC_step)]);
        emitUpdateBase(build);
    }

    build.setLabel(skip);
}

void emitExit(AssemblyBuilderX64& build, bool continueInVm)
{
    if (continueInVm)
        build.mov(al, 1);
    else
        build.xor_(eax, eax);

    build.jmp(qword[rNativeContext + offsetof(NativeContext, gateExit)]);
}

void emitUpdateBase(AssemblyBuilderX64& build)
{
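    // Called after any helper that can reallocate the Lua stack so that rBase matches L->base again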
    build.mov(rBase, qword[rState + offsetof(lua_State, base)]);
}

// Note: only uses rax/rdx, the caller may use other registers
static void emitSetSavedPc(AssemblyBuilderX64& build, int pcpos)
{
    build.mov(rdx, sCode);
    build.add(rdx, pcpos * sizeof(Instruction));
    build.mov(rax, qword[rState + offsetof(lua_State, ci)]);
    build.mov(qword[rax + offsetof(CallInfo, savedpc)], rdx);
}

void emitInterrupt(AssemblyBuilderX64& build, int pcpos)
{
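    // Polls the interrupt callback if one is installed; savedpc is set to the
    // next instruction up front and rewound by one instruction below if the
    // interrupt requests an exit
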
    Label skip;

    // Skip if there is no interrupt set
    build.mov(r8, qword[rState + offsetof(lua_State, global)]);
    build.mov(r8, qword[r8 + offsetof(global_State, cb.interrupt)]);
    build.test(r8, r8);
    build.jcc(ConditionX64::Zero, skip);

    emitSetSavedPc(build, pcpos + 1); // uses rax/rdx

    // Call interrupt
    // TODO: This code should move to the end of the function, or even be outlined so that it can be shared by multiple interruptible instructions
    build.mov(rArg1, rState);
    build.mov(dwordReg(rArg2), -1); // function accepts 'int' here and using qword reg would've forced 8 byte constant here
    build.call(r8);

    emitUpdateBase(build); // interrupt may have reallocated stack

    // Check if we need to exit
    build.mov(al, byte[rState + offsetof(lua_State, status)]);
    build.test(al, al);
    build.jcc(ConditionX64::Zero, skip);

    build.mov(rax, qword[rState + offsetof(lua_State, ci)]);
    build.sub(qword[rax + offsetof(CallInfo, savedpc)], sizeof(Instruction));
    emitExit(build, /* continueInVm */ false);

    build.setLabel(skip);
}

void emitFallback(AssemblyBuilderX64& build, NativeState& data, int op, int pcpos)
{
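    // Calls the interpreter's C fallback handler for this opcode; the handler
    // receives the current instruction pointer, base, and constant table
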
    NativeFallback& opinfo = data.context.fallback[op];
    LUAU_ASSERT(opinfo.fallback);

    if (build.logText)
        build.logAppend("; fallback\n");

    // fallback(L, instruction, base, k)
    build.mov(rArg1, rState);
    build.mov(rArg2, sCode);
    build.add(rArg2, pcpos * sizeof(Instruction));
    build.mov(rArg3, rBase);
    build.mov(rArg4, rConstants);
    build.call(qword[rNativeContext + offsetof(NativeContext, fallback) + op * sizeof(NativeFallback) + offsetof(NativeFallback, fallback)]);

    emitUpdateBase(build);

    // Some instructions may jump to a different instruction or a completely different function
    if (opinfo.flags & kFallbackUpdatePc)
    {
        build.mov(rcx, sClosure);
        build.mov(rcx, qword[rcx + offsetof(Closure, l.p)]);

        // Get instruction index from the returned instruction pointer
        // To get instruction index from instruction pointer, we need to divide byte offset by 4
        // But we will actually need to scale instruction index by 8 back to byte offset later so it cancels out
        build.sub(rax, sCode);

        build.mov(rdx, qword[rcx + offsetofProtoExecData]);

        // Get new instruction location and jump to it
        build.mov(rcx, qword[rdx + offsetof(NativeProto, instTargets)]);
        build.jmp(qword[rax * 2 + rcx]); // rax holds index * 4, so rax * 2 = index * 8, the stride of the instTargets pointer table
    }
}

void emitContinueCallInVm(AssemblyBuilderX64& build)
{
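    // Points savedpc at the start of the callee and exits to the VM gate with
    // 'continueInVm' set so the interpreter executes the call
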
    RegisterX64 proto = rcx; // Sync with emitInstCall

    build.mov(rdx, qword[proto + offsetof(Proto, code)]);
    build.mov(rax, qword[rState + offsetof(lua_State, ci)]);
    build.mov(qword[rax + offsetof(CallInfo, savedpc)], rdx);

    emitExit(build, /* continueInVm */ true);
}

} // namespace X64
} // namespace CodeGen
} // namespace Luau