Title: [220926] trunk/Source
Revision
220926
Author
[email protected]
Date
2017-08-18 12:20:23 -0700 (Fri, 18 Aug 2017)

Log Message

Implement 32-bit MacroAssembler::probe support for Windows.
https://bugs.webkit.org/show_bug.cgi?id=175449

Reviewed by Mark Lam.

Source/JavaScriptCore:

This is needed to enable the DFG.

* assembler/MacroAssemblerX86Common.cpp:
* assembler/testmasm.cpp:
(JSC::run):
(dllLauncherEntryPoint):
* shell/CMakeLists.txt:
* shell/PlatformWin.cmake:

Source/WTF:

Enable the DFG on Win32. 

* wtf/Platform.h:

Modified Paths

Diff

Modified: trunk/Source/JavaScriptCore/ChangeLog (220925 => 220926)


--- trunk/Source/JavaScriptCore/ChangeLog	2017-08-18 18:56:04 UTC (rev 220925)
+++ trunk/Source/JavaScriptCore/ChangeLog	2017-08-18 19:20:23 UTC (rev 220926)
@@ -1,3 +1,19 @@
+2017-08-18  Per Arne Vollan  <[email protected]>
+
+        Implement 32-bit MacroAssembler::probe support for Windows.
+        https://bugs.webkit.org/show_bug.cgi?id=175449
+
+        Reviewed by Mark Lam.
+
+        This is needed to enable the DFG.
+
+        * assembler/MacroAssemblerX86Common.cpp:
+        * assembler/testmasm.cpp:
+        (JSC::run):
+        (dllLauncherEntryPoint):
+        * shell/CMakeLists.txt:
+        * shell/PlatformWin.cmake:
+
 2017-08-18  Mark Lam  <[email protected]>
 
         Rename ProbeContext and ProbeFunction to Probe::State and Probe::Function.

Modified: trunk/Source/JavaScriptCore/assembler/MacroAssemblerX86Common.cpp (220925 => 220926)


--- trunk/Source/JavaScriptCore/assembler/MacroAssemblerX86Common.cpp	2017-08-18 18:56:04 UTC (rev 220925)
+++ trunk/Source/JavaScriptCore/assembler/MacroAssemblerX86Common.cpp	2017-08-18 19:20:23 UTC (rev 220926)
@@ -36,8 +36,6 @@
 
 extern "C" void ctiMasmProbeTrampoline();
 
-#if COMPILER(GCC_OR_CLANG)
-
 // The following are offsets for Probe::State fields accessed by the ctiMasmProbeTrampoline stub.
 
 #if CPU(X86)
@@ -110,53 +108,54 @@
 // These ASSERTs remind you that if you change the layout of Probe::State,
 // you need to change ctiMasmProbeTrampoline offsets above to match.
 #define PROBE_OFFSETOF(x) offsetof(struct Probe::State, x)
+#define PROBE_OFFSETOF_REG(x, reg) offsetof(struct Probe::State, x) + reg * sizeof((reinterpret_cast<Probe::State*>(0))->x[reg])
 COMPILE_ASSERT(PROBE_OFFSETOF(probeFunction) == PROBE_PROBE_FUNCTION_OFFSET, ProbeState_probeFunction_offset_matches_ctiMasmProbeTrampoline);
 COMPILE_ASSERT(PROBE_OFFSETOF(arg) == PROBE_ARG_OFFSET, ProbeState_arg_offset_matches_ctiMasmProbeTrampoline);
 COMPILE_ASSERT(PROBE_OFFSETOF(initializeStackFunction) == PROBE_INIT_STACK_FUNCTION_OFFSET, ProbeState_initializeStackFunction_offset_matches_ctiMasmProbeTrampoline);
 COMPILE_ASSERT(PROBE_OFFSETOF(initializeStackArg) == PROBE_INIT_STACK_ARG_OFFSET, ProbeState_initializeStackArg_offset_matches_ctiMasmProbeTrampoline);
 
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::eax]) == PROBE_CPU_EAX_OFFSET, ProbeState_cpu_eax_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::ecx]) == PROBE_CPU_ECX_OFFSET, ProbeState_cpu_ecx_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::edx]) == PROBE_CPU_EDX_OFFSET, ProbeState_cpu_edx_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::ebx]) == PROBE_CPU_EBX_OFFSET, ProbeState_cpu_ebx_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::esp]) == PROBE_CPU_ESP_OFFSET, ProbeState_cpu_esp_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::ebp]) == PROBE_CPU_EBP_OFFSET, ProbeState_cpu_ebp_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::esi]) == PROBE_CPU_ESI_OFFSET, ProbeState_cpu_esi_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::edi]) == PROBE_CPU_EDI_OFFSET, ProbeState_cpu_edi_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.sprs[X86Registers::eip]) == PROBE_CPU_EIP_OFFSET, ProbeState_cpu_eip_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.sprs[X86Registers::eflags]) == PROBE_CPU_EFLAGS_OFFSET, ProbeState_cpu_eflags_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::eax) == PROBE_CPU_EAX_OFFSET, ProbeState_cpu_eax_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::ecx) == PROBE_CPU_ECX_OFFSET, ProbeState_cpu_ecx_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::edx) == PROBE_CPU_EDX_OFFSET, ProbeState_cpu_edx_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::ebx) == PROBE_CPU_EBX_OFFSET, ProbeState_cpu_ebx_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::esp) == PROBE_CPU_ESP_OFFSET, ProbeState_cpu_esp_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::ebp) == PROBE_CPU_EBP_OFFSET, ProbeState_cpu_ebp_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::esi) == PROBE_CPU_ESI_OFFSET, ProbeState_cpu_esi_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::edi) == PROBE_CPU_EDI_OFFSET, ProbeState_cpu_edi_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.sprs, X86Registers::eip) == PROBE_CPU_EIP_OFFSET, ProbeState_cpu_eip_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.sprs, X86Registers::eflags) == PROBE_CPU_EFLAGS_OFFSET, ProbeState_cpu_eflags_offset_matches_ctiMasmProbeTrampoline);
 
 #if CPU(X86_64)
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::r8]) == PROBE_CPU_R8_OFFSET, ProbeState_cpu_r8_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::r9]) == PROBE_CPU_R9_OFFSET, ProbeState_cpu_r9_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::r10]) == PROBE_CPU_R10_OFFSET, ProbeState_cpu_r10_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::r11]) == PROBE_CPU_R11_OFFSET, ProbeState_cpu_r11_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::r12]) == PROBE_CPU_R12_OFFSET, ProbeState_cpu_r12_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::r13]) == PROBE_CPU_R13_OFFSET, ProbeState_cpu_r13_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::r14]) == PROBE_CPU_R14_OFFSET, ProbeState_cpu_r14_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.gprs[X86Registers::r15]) == PROBE_CPU_R15_OFFSET, ProbeState_cpu_r15_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::r8) == PROBE_CPU_R8_OFFSET, ProbeState_cpu_r8_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::r9) == PROBE_CPU_R9_OFFSET, ProbeState_cpu_r9_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::r10) == PROBE_CPU_R10_OFFSET, ProbeState_cpu_r10_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::r11) == PROBE_CPU_R11_OFFSET, ProbeState_cpu_r11_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::r12) == PROBE_CPU_R12_OFFSET, ProbeState_cpu_r12_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::r13) == PROBE_CPU_R13_OFFSET, ProbeState_cpu_r13_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::r14) == PROBE_CPU_R14_OFFSET, ProbeState_cpu_r14_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.gprs, X86Registers::r15) == PROBE_CPU_R15_OFFSET, ProbeState_cpu_r15_offset_matches_ctiMasmProbeTrampoline);
 #endif // CPU(X86_64)
 
 COMPILE_ASSERT(!(PROBE_CPU_XMM0_OFFSET & 0x7), ProbeState_cpu_xmm0_offset_should_be_8_byte_aligned);
 
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm0]) == PROBE_CPU_XMM0_OFFSET, ProbeState_cpu_xmm0_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm1]) == PROBE_CPU_XMM1_OFFSET, ProbeState_cpu_xmm1_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm2]) == PROBE_CPU_XMM2_OFFSET, ProbeState_cpu_xmm2_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm3]) == PROBE_CPU_XMM3_OFFSET, ProbeState_cpu_xmm3_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm4]) == PROBE_CPU_XMM4_OFFSET, ProbeState_cpu_xmm4_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm5]) == PROBE_CPU_XMM5_OFFSET, ProbeState_cpu_xmm5_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm6]) == PROBE_CPU_XMM6_OFFSET, ProbeState_cpu_xmm6_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm7]) == PROBE_CPU_XMM7_OFFSET, ProbeState_cpu_xmm7_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm0) == PROBE_CPU_XMM0_OFFSET, ProbeState_cpu_xmm0_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm1) == PROBE_CPU_XMM1_OFFSET, ProbeState_cpu_xmm1_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm2) == PROBE_CPU_XMM2_OFFSET, ProbeState_cpu_xmm2_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm3) == PROBE_CPU_XMM3_OFFSET, ProbeState_cpu_xmm3_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm4) == PROBE_CPU_XMM4_OFFSET, ProbeState_cpu_xmm4_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm5) == PROBE_CPU_XMM5_OFFSET, ProbeState_cpu_xmm5_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm6) == PROBE_CPU_XMM6_OFFSET, ProbeState_cpu_xmm6_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm7) == PROBE_CPU_XMM7_OFFSET, ProbeState_cpu_xmm7_offset_matches_ctiMasmProbeTrampoline);
 
 #if CPU(X86_64)
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm8]) == PROBE_CPU_XMM8_OFFSET, ProbeState_cpu_xmm8_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm9]) == PROBE_CPU_XMM9_OFFSET, ProbeState_cpu_xmm9_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm10]) == PROBE_CPU_XMM10_OFFSET, ProbeState_cpu_xmm10_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm11]) == PROBE_CPU_XMM11_OFFSET, ProbeState_cpu_xmm11_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm12]) == PROBE_CPU_XMM12_OFFSET, ProbeState_cpu_xmm12_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm13]) == PROBE_CPU_XMM13_OFFSET, ProbeState_cpu_xmm13_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm14]) == PROBE_CPU_XMM14_OFFSET, ProbeState_cpu_xmm14_offset_matches_ctiMasmProbeTrampoline);
-COMPILE_ASSERT(PROBE_OFFSETOF(cpu.fprs[X86Registers::xmm15]) == PROBE_CPU_XMM15_OFFSET, ProbeState_cpu_xmm15_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm8) == PROBE_CPU_XMM8_OFFSET, ProbeState_cpu_xmm8_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm9) == PROBE_CPU_XMM9_OFFSET, ProbeState_cpu_xmm9_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm10) == PROBE_CPU_XMM10_OFFSET, ProbeState_cpu_xmm10_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm11) == PROBE_CPU_XMM11_OFFSET, ProbeState_cpu_xmm11_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm12) == PROBE_CPU_XMM12_OFFSET, ProbeState_cpu_xmm12_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm13) == PROBE_CPU_XMM13_OFFSET, ProbeState_cpu_xmm13_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm14) == PROBE_CPU_XMM14_OFFSET, ProbeState_cpu_xmm14_offset_matches_ctiMasmProbeTrampoline);
+COMPILE_ASSERT(PROBE_OFFSETOF_REG(cpu.fprs, X86Registers::xmm15) == PROBE_CPU_XMM15_OFFSET, ProbeState_cpu_xmm15_offset_matches_ctiMasmProbeTrampoline);
 #endif // CPU(X86_64)
 
 COMPILE_ASSERT(sizeof(Probe::State) == PROBE_SIZE, ProbeState_size_matches_ctiMasmProbeTrampoline);
@@ -164,6 +163,7 @@
 #undef PROBE_OFFSETOF
 
 #if CPU(X86)
+#if COMPILER(GCC_OR_CLANG)
 asm (
     ".globl " SYMBOL_STRING(ctiMasmProbeTrampoline) "\n"
     HIDE_SYMBOL(ctiMasmProbeTrampoline) "\n"
@@ -323,9 +323,173 @@
     "popl %ebp" "\n"
     "ret" "\n"
 );
+#endif
+
+#if COMPILER(MSVC)
+extern "C" __declspec(naked) void ctiMasmProbeTrampoline()
+{
+    __asm {
+        pushfd;
+
+        // MacroAssemblerX86Common::probe() has already generated code to store some values.
+        // Together with the eflags pushed above, the top of stack now looks like
+        // this:
+        //     esp[0 * ptrSize]: eflags
+        //     esp[1 * ptrSize]: return address / saved eip
+        //     esp[2 * ptrSize]: probe handler function
+        //     esp[3 * ptrSize]: probe arg
+        //     esp[4 * ptrSize]: saved eax
+        //     esp[5 * ptrSize]: saved esp
+
+        mov eax, esp
+        sub esp, PROBE_SIZE + OUT_SIZE
+
+        // The X86_64 ABI specifies that the worse case stack alignment requirement is 32 bytes.
+        and esp, ~0x1f
+
+        mov [PROBE_CPU_EBP_OFFSET + esp], ebp
+        mov ebp, esp // Save the ProbeContext*.
+
+        mov [PROBE_CPU_ECX_OFFSET + ebp], ecx
+        mov [PROBE_CPU_EDX_OFFSET + ebp], edx
+        mov [PROBE_CPU_EBX_OFFSET + ebp], ebx
+        mov [PROBE_CPU_ESI_OFFSET + ebp], esi
+        mov [PROBE_CPU_EDI_OFFSET + ebp], edi
+
+        mov ecx, [0 * PTR_SIZE + eax]
+        mov [PROBE_CPU_EFLAGS_OFFSET + ebp], ecx
+        mov ecx, [1 * PTR_SIZE + eax]
+        mov [PROBE_CPU_EIP_OFFSET + ebp], ecx
+        mov ecx, [2 * PTR_SIZE + eax]
+        mov [PROBE_PROBE_FUNCTION_OFFSET + ebp], ecx
+        mov ecx, [3 * PTR_SIZE + eax]
+        mov [PROBE_ARG_OFFSET + ebp], ecx
+        mov ecx, [4 * PTR_SIZE + eax]
+        mov [PROBE_CPU_EAX_OFFSET + ebp], ecx
+        mov ecx, [5 * PTR_SIZE + eax]
+        mov [PROBE_CPU_ESP_OFFSET + ebp], ecx
+
+        movq qword ptr[PROBE_CPU_XMM0_OFFSET + ebp], xmm0
+        movq qword ptr[PROBE_CPU_XMM1_OFFSET + ebp], xmm1
+        movq qword ptr[PROBE_CPU_XMM2_OFFSET + ebp], xmm2
+        movq qword ptr[PROBE_CPU_XMM3_OFFSET + ebp], xmm3
+        movq qword ptr[PROBE_CPU_XMM4_OFFSET + ebp], xmm4
+        movq qword ptr[PROBE_CPU_XMM5_OFFSET + ebp], xmm5
+        movq qword ptr[PROBE_CPU_XMM6_OFFSET + ebp], xmm6
+        movq qword ptr[PROBE_CPU_XMM7_OFFSET + ebp], xmm7
+
+        xor eax, eax
+        mov [PROBE_INIT_STACK_FUNCTION_OFFSET + ebp], eax
+
+        // Reserve stack space for the arg while maintaining the required stack
+        // pointer 32 byte alignment:
+        sub esp, 0x20
+        mov [0 + esp], ebp // the ProbeContext* arg.
+
+        call [PROBE_PROBE_FUNCTION_OFFSET + ebp]
+
+        // Make sure the ProbeContext is entirely below the result stack pointer so
+        // that register values are still preserved when we call the initializeStack
+        // function.
+        mov ecx, PROBE_SIZE + OUT_SIZE
+        mov eax, ebp
+        mov edx, [PROBE_CPU_ESP_OFFSET + ebp]
+        add eax, ecx
+        cmp edx, eax
+        jge ctiMasmProbeTrampolineProbeContextIsSafe
+
+        // Allocate a safe place on the stack below the result stack pointer to stash the ProbeContext.
+        sub edx, ecx
+        and edx, ~0x1f // Keep the stack pointer 32 bytes aligned.
+        xor eax, eax
+        mov esp, edx
+
+        mov ecx, PROBE_SIZE
+
+        // Copy the ProbeContext to the safe place.
+        ctiMasmProbeTrampolineCopyLoop :
+        mov edx, [ebp + eax]
+        mov [esp + eax], edx
+        add eax, PTR_SIZE
+        cmp ecx, eax
+        jg ctiMasmProbeTrampolineCopyLoop
+
+        mov ebp, esp
+
+        // Call initializeStackFunction if present.
+        ctiMasmProbeTrampolineProbeContextIsSafe :
+        xor ecx, ecx
+        add ecx, [PROBE_INIT_STACK_FUNCTION_OFFSET + ebp]
+        je ctiMasmProbeTrampolineRestoreRegisters
+
+        // Reserve stack space for the arg while maintaining the required stack
+        // pointer 32 byte alignment:
+        sub esp, 0x20
+        mov [0 + esp], ebp // the ProbeContext* arg.
+        call ecx
+
+        ctiMasmProbeTrampolineRestoreRegisters :
+
+        // To enable probes to modify register state, we copy all registers
+        // out of the ProbeContext before returning.
+
+        mov edx, [PROBE_CPU_EDX_OFFSET + ebp]
+        mov ebx, [PROBE_CPU_EBX_OFFSET + ebp]
+        mov esi, [PROBE_CPU_ESI_OFFSET + ebp]
+        mov edi, [PROBE_CPU_EDI_OFFSET + ebp]
+
+        movq xmm0, qword ptr[PROBE_CPU_XMM0_OFFSET + ebp]
+        movq xmm1, qword ptr[PROBE_CPU_XMM1_OFFSET + ebp]
+        movq xmm2, qword ptr[PROBE_CPU_XMM2_OFFSET + ebp]
+        movq xmm3, qword ptr[PROBE_CPU_XMM3_OFFSET + ebp]
+        movq xmm4, qword ptr[PROBE_CPU_XMM4_OFFSET + ebp]
+        movq xmm5, qword ptr[PROBE_CPU_XMM5_OFFSET + ebp]
+        movq xmm6, qword ptr[PROBE_CPU_XMM6_OFFSET + ebp]
+        movq xmm7, qword ptr[PROBE_CPU_XMM7_OFFSET + ebp]
+
+        // There are 6 more registers left to restore:
+        //     eax, ecx, ebp, esp, eip, and eflags.
+
+        // The restoration process at ctiMasmProbeTrampolineEnd below works by popping
+        // 5 words off the stack into eflags, eax, ecx, ebp, and eip. These 5 words need
+        // to be pushed on top of the final esp value so that just by popping the 5 words,
+        // we'll get the esp that the probe wants to set. Let's call this area (for storing
+        // these 5 words) the restore area.
+        mov ecx, [PROBE_CPU_ESP_OFFSET + ebp]
+        sub ecx, 5 * PTR_SIZE
+
+        // ecx now points to the restore area.
+
+        // Copy remaining restore values from the ProbeContext to the restore area.
+        // Note: We already ensured above that the ProbeContext is in a safe location before
+        // calling the initializeStackFunction. The initializeStackFunction is not allowed to
+        // change the stack pointer again.
+        mov eax, [PROBE_CPU_EFLAGS_OFFSET + ebp]
+        mov [0 * PTR_SIZE + ecx], eax
+        mov eax, [PROBE_CPU_EAX_OFFSET + ebp]
+        mov [1 * PTR_SIZE + ecx], eax
+        mov eax, [PROBE_CPU_ECX_OFFSET + ebp]
+        mov [2 * PTR_SIZE + ecx], eax
+        mov eax, [PROBE_CPU_EBP_OFFSET + ebp]
+        mov [3 * PTR_SIZE + ecx], eax
+        mov eax, [PROBE_CPU_EIP_OFFSET + ebp]
+        mov [4 * PTR_SIZE + ecx], eax
+        mov esp, ecx
+
+        // Do the remaining restoration by popping off the restore area.
+        popfd
+        pop eax
+        pop ecx
+        pop ebp
+        ret
+    }
+}
+#endif
+
 #endif // CPU(X86)
 
 #if CPU(X86_64)
+#if COMPILER(GCC_OR_CLANG)
 asm (
     ".globl " SYMBOL_STRING(ctiMasmProbeTrampoline) "\n"
     HIDE_SYMBOL(ctiMasmProbeTrampoline) "\n"
@@ -413,7 +577,7 @@
     "addq %rcx, %rax" "\n"
     "cmpq %rax, %rdx" "\n"
     "jge " LOCAL_LABEL_STRING(ctiMasmProbeTrampolineProbeStateIsSafe) "\n"
- 
+
     // Allocate a safe place on the stack below the result stack pointer to stash the Probe::State.
     "subq %rcx, %rdx" "\n"
     "andq $~0x1f, %rdx" "\n" // Keep the stack pointer 32 bytes aligned.
@@ -513,11 +677,10 @@
     "popq %rbp" "\n"
     "ret" "\n"
 );
+#endif // COMPILER(GCC_OR_CLANG)
 #endif // CPU(X86_64)
 
-#endif // COMPILER(GCC_OR_CLANG)
-
-#if OS(WINDOWS)
+#if OS(WINDOWS) && CPU(X86_64)
 static bool booleanTrueForAvoidingNoReturnDeclaration() { return true; }
 
 extern "C" void ctiMasmProbeTrampoline()

Modified: trunk/Source/JavaScriptCore/assembler/testmasm.cpp (220925 => 220926)


--- trunk/Source/JavaScriptCore/assembler/testmasm.cpp	2017-08-18 18:56:04 UTC (rev 220925)
+++ trunk/Source/JavaScriptCore/assembler/testmasm.cpp	2017-08-18 19:20:23 UTC (rev 220926)
@@ -691,7 +691,11 @@
     Deque<RefPtr<SharedTask<void()>>> tasks;
 
     auto shouldRun = [&] (const char* testName) -> bool {
+#if OS(UNIX)
         return !filter || !!strcasestr(testName, filter);
+#else
+        return !filter || !!strstr(testName, filter);
+#endif
     };
 
     RUN(testSimple());
@@ -765,3 +769,10 @@
     run(filter);
     return 0;
 }
+
+#if OS(WINDOWS)
+extern "C" __declspec(dllexport) int WINAPI dllLauncherEntryPoint(int argc, const char* argv[])
+{
+    return main(argc, const_cast<char**>(argv));
+}
+#endif

Modified: trunk/Source/JavaScriptCore/shell/CMakeLists.txt (220925 => 220926)


--- trunk/Source/JavaScriptCore/shell/CMakeLists.txt	2017-08-18 18:56:04 UTC (rev 220925)
+++ trunk/Source/JavaScriptCore/shell/CMakeLists.txt	2017-08-18 19:20:23 UTC (rev 220926)
@@ -55,7 +55,6 @@
     add_dependencies(jsc jscLib)
 endif ()
 
-if (NOT WIN32)
     set(TESTMASM_SOURCES
         ../assembler/testmasm.cpp
     )
@@ -68,6 +67,7 @@
         ../b3/air/testair.cpp
     )
 
+if (NOT WIN32)
     add_executable(testmasm ${TESTMASM_SOURCES})
     target_link_libraries(testmasm ${JSC_LIBRARIES})
 

Modified: trunk/Source/JavaScriptCore/shell/PlatformWin.cmake (220925 => 220926)


--- trunk/Source/JavaScriptCore/shell/PlatformWin.cmake	2017-08-18 18:56:04 UTC (rev 220925)
+++ trunk/Source/JavaScriptCore/shell/PlatformWin.cmake	2017-08-18 19:20:23 UTC (rev 220926)
@@ -33,3 +33,10 @@
 target_link_libraries(testapi shlwapi)
 add_dependencies(testapi testapiLib)
 target_link_libraries(testapiLib JavaScriptCore)
+
+add_library(testmasmLib SHARED ../assembler/testmasm.cpp)
+add_executable(testmasm ${JSC_SOURCES})
+set_target_properties(testmasm PROPERTIES OUTPUT_NAME "testmasm${DEBUG_SUFFIX}")
+target_link_libraries(testmasm shlwapi)
+add_dependencies(testmasm testmasmLib)
+target_link_libraries(testmasmLib JavaScriptCore)

Modified: trunk/Source/WTF/ChangeLog (220925 => 220926)


--- trunk/Source/WTF/ChangeLog	2017-08-18 18:56:04 UTC (rev 220925)
+++ trunk/Source/WTF/ChangeLog	2017-08-18 19:20:23 UTC (rev 220926)
@@ -1,3 +1,14 @@
+2017-08-18  Per Arne Vollan  <[email protected]>
+
+        Implement 32-bit MacroAssembler::probe support for Windows.
+        https://bugs.webkit.org/show_bug.cgi?id=175449
+
+        Reviewed by Mark Lam.
+
+        Enable the DFG on Win32. 
+
+        * wtf/Platform.h:
+
 2017-08-17  Mark Lam  <[email protected]>
 
         Only use 16 VFP registers if !CPU(ARM_NEON).

Modified: trunk/Source/WTF/wtf/Platform.h (220925 => 220926)


--- trunk/Source/WTF/wtf/Platform.h	2017-08-18 18:56:04 UTC (rev 220925)
+++ trunk/Source/WTF/wtf/Platform.h	2017-08-18 19:20:23 UTC (rev 220926)
@@ -797,10 +797,13 @@
 /* FIXME: MIPS cannot enable the DFG until it has support for MacroAssembler::probe().
    https://bugs.webkit.org/show_bug.cgi?id=175447
 */
-/* FIXME: Windows cannot enable the DFG until it has support for MacroAssembler::probe().
-   https://bugs.webkit.org/show_bug.cgi?id=175449
+/* FIXME: Win64 cannot enable the DFG until it has support for MacroAssembler::probe().
+   https://bugs.webkit.org/show_bug.cgi?id=175724
 */
+#if CPU(X86) && OS(WINDOWS)
+#define ENABLE_DFG_JIT 1
 #endif
+#endif
 
 /* Concurrent JS only works on 64-bit platforms because it requires that
    values get stored to atomically. This is trivially true on 64-bit platforms,
_______________________________________________
webkit-changes mailing list
[email protected]
https://lists.webkit.org/mailman/listinfo/webkit-changes

Reply via email to