// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "CodeGenX64.h"
#include "Luau/AssemblyBuilderX64.h"
#include "Luau/UnwindBuilder.h"
#include "NativeState.h"
#include "EmitCommonX64.h"
#include "lstate.h"
/* An overview of native environment stack setup that we are making in the entry function:
 * Each line is 8 bytes, stack grows downwards.
 *
 * | ... previous frames ...
 * | rdx home space | (unused)
 * | rcx home space | (unused)
 * | return address |
 * | ... saved non-volatile registers ... <-- rsp + kStackSize + kLocalsSize
 * | unused | for 16 byte alignment of the stack
 * | sCode |
 * | sClosure | <-- rsp + kStackSize
 * | argument 6 | <-- rsp + 40
 * | argument 5 | <-- rsp + 32
 * | r9 home space |
 * | r8 home space |
 * | rdx home space |
 * | rcx home space | <-- rsp points here
 *
 * Arguments to our entry function are saved to home space only on Windows.
 * Space for arguments to function we call is always reserved, but used only on Windows.
 *
 * Right now we use a frame pointer, but because of a fixed layout we can omit it in the future
 */
namespace Luau
{
namespace CodeGen
{
namespace X64
{
// Labels recorded while assembling the entry function; used below to compute
// unwind-info offsets and to locate the gate entry/exit addresses.
struct EntryLocations
{
    Label start;         // first instruction of the entry function
    Label prologueEnd;   // first instruction after the prologue (stack fully set up)
    Label epilogueStart; // start of the epilogue; native code jumps here to exit
};
// Emits the entry function that transitions from the host C++ ABI into the
// custom ABI used by generated code (register assignments come from
// EmitCommonX64.h), and records matching unwind information.
// Returns the labels marking the function start, prologue end, and epilogue start.
static EntryLocations buildEntryFunction(AssemblyBuilderX64& build, UnwindBuilder& unwind)
{
    EntryLocations locations;

    build.align(kFunctionAlignment, X64::AlignmentDataX64::Ud2);
    locations.start = build.setLabel();
    unwind.startFunction();

    // Incoming argument registers differ between the Windows and SysV calling conventions
    RegisterX64 rArg1 = (build.abi == ABIX64::Windows) ? rcx : rdi; // lua_State* (moved into rState below)
    RegisterX64 rArg2 = (build.abi == ABIX64::Windows) ? rdx : rsi; // Proto* (k/code read from it below)
    RegisterX64 rArg3 = (build.abi == ABIX64::Windows) ? r8 : rdx;  // address of the instruction to jump to
    RegisterX64 rArg4 = (build.abi == ABIX64::Windows) ? r9 : rcx;  // native context (moved into rNativeContext)

    // Save common non-volatile registers
    if (build.abi == ABIX64::SystemV)
    {
        // We need to use a standard rbp-based frame setup for debuggers to work with JIT code
        build.push(rbp);
        build.mov(rbp, rsp);
    }

    build.push(rbx);
    build.push(r12);
    build.push(r13);
    build.push(r14);
    build.push(r15);

    if (build.abi == ABIX64::Windows)
    {
        // Save non-volatile registers that are specific to Windows x64 ABI
        build.push(rdi);
        build.push(rsi);

        // On Windows, rbp is available as a general-purpose non-volatile register; we currently don't use it, but we need to push an even number
        // of registers for stack alignment...
        build.push(rbp);

        // TODO: once we start using non-volatile SIMD registers on Windows, we will save those here
    }

    // Allocate stack space (reg home area + local data)
    build.sub(rsp, kStackSize + kLocalsSize);

    locations.prologueEnd = build.setLabel();

    // Describe the prologue to the unwinder; the register lists must mirror the pushes above in order
    uint32_t prologueSize = build.getLabelOffset(locations.prologueEnd) - build.getLabelOffset(locations.start);

    if (build.abi == ABIX64::SystemV)
        unwind.prologueX64(prologueSize, kStackSize + kLocalsSize, /* setupFrame= */ true, {rbx, r12, r13, r14, r15});
    else if (build.abi == ABIX64::Windows)
        unwind.prologueX64(prologueSize, kStackSize + kLocalsSize, /* setupFrame= */ false, {rbx, r12, r13, r14, r15, rdi, rsi, rbp});

    // Setup native execution environment
    build.mov(rState, rArg1);
    build.mov(rNativeContext, rArg4);

    build.mov(rBase, qword[rState + offsetof(lua_State, base)]); // L->base
    build.mov(rax, qword[rState + offsetof(lua_State, ci)]);     // L->ci
    build.mov(rax, qword[rax + offsetof(CallInfo, func)]);       // L->ci->func
    build.mov(rax, qword[rax + offsetof(TValue, value.gc)]);     // L->ci->func->value.gc aka cl
    build.mov(sClosure, rax);
    build.mov(rConstants, qword[rArg2 + offsetof(Proto, k)]); // proto->k
    build.mov(rax, qword[rArg2 + offsetof(Proto, code)]);     // proto->code
    build.mov(sCode, rax);

    // Jump to the specified instruction; further control flow will be handled with custom ABI with register setup from EmitCommonX64.h
    build.jmp(rArg3);

    // Even though we jumped away, we will return here in the end
    locations.epilogueStart = build.setLabel();

    // Cleanup and exit: pops must be the exact reverse of the pushes in the prologue
    build.add(rsp, kStackSize + kLocalsSize);

    if (build.abi == ABIX64::Windows)
    {
        build.pop(rbp);
        build.pop(rsi);
        build.pop(rdi);
    }

    build.pop(r15);
    build.pop(r14);
    build.pop(r13);
    build.pop(r12);
    build.pop(rbx);

    if (build.abi == ABIX64::SystemV)
        build.pop(rbp);

    build.ret();

    // Our entry function is special, it spans the whole remaining code area
    unwind.finishFunction(build.getLabelOffset(locations.start), kFullBlockFuncton);

    return locations;
}
// Assembles the entry function together with its unwind info, places it into
// executable memory via the code allocator, and publishes the resulting gate
// entry/exit addresses into data.context.
// Returns false (after asserting) if code allocation fails.
bool initHeaderFunctions(NativeState& data)
{
    AssemblyBuilderX64 build(/* logText= */ false);
    UnwindBuilder& unwind = *data.unwindBuilder.get();

    unwind.startInfo(UnwindBuilder::X64);

    EntryLocations entryLocations = buildEntryFunction(build, unwind);
    build.finalize();

    unwind.finishInfo();

    // Entry function is expected to emit code only, no data section
    LUAU_ASSERT(build.data.empty());

    uint8_t* codeStart = nullptr;
    if (!data.codeAllocator.allocate(
            build.data.data(), int(build.data.size()), build.code.data(), int(build.code.size()), data.gateData, data.gateDataSize, codeStart))
    {
        LUAU_ASSERT(!"Failed to create entry function");
        return false;
    }

    // Set the offset at the beginning so that functions in new blocks will not overlay the locations
    // specified by the unwind information of the entry function
    unwind.setBeginOffset(build.getLabelOffset(entryLocations.prologueEnd));

    data.context.gateEntry = codeStart + build.getLabelOffset(entryLocations.start);
    data.context.gateExit = codeStart + build.getLabelOffset(entryLocations.epilogueStart);

    return true;
}
// Emits the per-module helper routines and binds their labels in 'helpers'.
// When assembly text logging is enabled, each helper is preceded by a listing comment.
void assembleHelpers(X64::AssemblyBuilderX64& build, ModuleHelpers& helpers)
{
    // Writes the listing comment (if logging) and binds the helper's label at the current position
    auto beginHelper = [&](const char* banner, Label& label) {
        if (build.logText)
            build.logAppend(banner);

        build.setLabel(label);
    };

    beginHelper("; exitContinueVm\n", helpers.exitContinueVm);
    emitExit(build, /* continueInVm */ true);

    beginHelper("; exitNoContinueVm\n", helpers.exitNoContinueVm);
    emitExit(build, /* continueInVm */ false);

    beginHelper("; continueCallInVm\n", helpers.continueCallInVm);
    emitContinueCallInVm(build);

    beginHelper("; interrupt\n", helpers.interrupt);
    emitInterrupt(build);

    beginHelper("; return\n", helpers.return_);
    emitReturn(build, helpers);
}
} // namespace X64
} // namespace CodeGen
} // namespace Luau