From ec90cd1bf8f905e7befbb5049afbd1304dbfe13b Mon Sep 17 00:00:00 2001
From: Chris Lattner
Date: Mon, 10 Dec 2007 19:10:18 +0000
Subject: [PATCH] Disable cfi directives for now, darwin doesn't support them.

These should probably be something like:

  CFI(".cfi_def_cfa_offset 16\n")

where CFI is defined to a noop on darwin and other platforms that don't
support those directives.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@44803 91177308-0d34-0410-b5e6-96231b3b80d8
---
 lib/Target/X86/X86JITInfo.cpp | 137 +++++++++++++++++-----------------
 1 file changed, 70 insertions(+), 67 deletions(-)

diff --git a/lib/Target/X86/X86JITInfo.cpp b/lib/Target/X86/X86JITInfo.cpp
index 4bafdcd8e8d..9aa10d58483 100644
--- a/lib/Target/X86/X86JITInfo.cpp
+++ b/lib/Target/X86/X86JITInfo.cpp
@@ -58,27 +58,27 @@ extern "C" {
     ".align 8\n"
     ".globl " ASMPREFIX "X86CompilationCallback\n"
   ASMPREFIX "X86CompilationCallback:\n"
-    ".cfi_startproc\n"
+//    ".cfi_startproc\n"
     // Save RBP
     "pushq %rbp\n"
-    ".cfi_def_cfa_offset 16\n"
-    ".cfi_offset %rbp, -16\n"
+//    ".cfi_def_cfa_offset 16\n"
+//    ".cfi_offset %rbp, -16\n"
     // Save RSP
     "movq %rsp, %rbp\n"
-    ".cfi_def_cfa_register %rbp\n"
+//    ".cfi_def_cfa_register %rbp\n"
     // Save all int arg registers
     "pushq %rdi\n"
-    ".cfi_rel_offset %rdi, 0\n"
+//    ".cfi_rel_offset %rdi, 0\n"
     "pushq %rsi\n"
-    ".cfi_rel_offset %rsi, 8\n"
+//    ".cfi_rel_offset %rsi, 8\n"
     "pushq %rdx\n"
-    ".cfi_rel_offset %rdx, 16\n"
+//    ".cfi_rel_offset %rdx, 16\n"
     "pushq %rcx\n"
-    ".cfi_rel_offset %rcx, 24\n"
+//    ".cfi_rel_offset %rcx, 24\n"
     "pushq %r8\n"
-    ".cfi_rel_offset %r8, 32\n"
+//    ".cfi_rel_offset %r8, 32\n"
     "pushq %r9\n"
-    ".cfi_rel_offset %r9, 40\n"
+//    ".cfi_rel_offset %r9, 40\n"
     // Align stack on 16-byte boundary. ESP might not be properly aligned
     // (8 byte) if this is called from an indirect stub.
     "andq $-16, %rsp\n"
@@ -107,34 +107,35 @@ extern "C" {
     "movaps (%rsp), %xmm0\n"
     // Restore RSP
     "movq %rbp, %rsp\n"
-    ".cfi_def_cfa_register esp\n"
+//    ".cfi_def_cfa_register esp\n"
     // Restore all int arg registers
     "subq $48, %rsp\n"
-    ".cfi_adjust_cfa_offset 48\n"
+//    ".cfi_adjust_cfa_offset 48\n"
     "popq %r9\n"
-    ".cfi_adjust_cfa_offset -8\n"
-    ".cfi_restore %r9\n"
+//    ".cfi_adjust_cfa_offset -8\n"
+//    ".cfi_restore %r9\n"
     "popq %r8\n"
-    ".cfi_adjust_cfa_offset -8\n"
-    ".cfi_restore %r8\n"
+//    ".cfi_adjust_cfa_offset -8\n"
+//    ".cfi_restore %r8\n"
     "popq %rcx\n"
-    ".cfi_adjust_cfa_offset -8\n"
-    ".cfi_restore %rcx\n"
+//    ".cfi_adjust_cfa_offset -8\n"
+//    ".cfi_restore %rcx\n"
     "popq %rdx\n"
-    ".cfi_adjust_cfa_offset -8\n"
-    ".cfi_restore %rdx\n"
+//    ".cfi_adjust_cfa_offset -8\n"
+//    ".cfi_restore %rdx\n"
     "popq %rsi\n"
-    ".cfi_adjust_cfa_offset -8\n"
-    ".cfi_restore %rsi\n"
+//    ".cfi_adjust_cfa_offset -8\n"
+//    ".cfi_restore %rsi\n"
     "popq %rdi\n"
-    ".cfi_adjust_cfa_offset -8\n"
-    ".cfi_restore %rdi\n"
+//    ".cfi_adjust_cfa_offset -8\n"
+//    ".cfi_restore %rdi\n"
     // Restore RBP
     "popq %rbp\n"
-    ".cfi_adjust_cfa_offset -8\n"
-    ".cfi_restore %rbp\n"
+//    ".cfi_adjust_cfa_offset -8\n"
+//    ".cfi_restore %rbp\n"
     "ret\n"
-    ".cfi_endproc\n");
+//    ".cfi_endproc\n"
+  );
 #elif defined(__i386__) || defined(i386) || defined(_M_IX86)
 #ifndef _MSC_VER
   void X86CompilationCallback(void);
@@ -143,18 +144,18 @@ extern "C" {
     ".align 8\n"
     ".globl " ASMPREFIX "X86CompilationCallback\n"
   ASMPREFIX "X86CompilationCallback:\n"
-    ".cfi_startproc\n"
+//    ".cfi_startproc\n"
     "pushl %ebp\n"
-    ".cfi_def_cfa_offset 8\n"
-    ".cfi_offset %ebp, -8\n"
+//    ".cfi_def_cfa_offset 8\n"
+//    ".cfi_offset %ebp, -8\n"
     "movl %esp, %ebp\n"    // Standard prologue
-    ".cfi_def_cfa_register %ebp\n"
+//    ".cfi_def_cfa_register %ebp\n"
     "pushl %eax\n"
-    ".cfi_rel_offset %eax, 0\n"
+//    ".cfi_rel_offset %eax, 0\n"
     "pushl %edx\n"    // Save EAX/EDX/ECX
-    ".cfi_rel_offset %edx, 4\n"
+//    ".cfi_rel_offset %edx, 4\n"
     "pushl %ecx\n"
-    ".cfi_rel_offset %ecx, 8\n"
+//    ".cfi_rel_offset %ecx, 8\n"
 #if defined(__APPLE__)
     "andl $-16, %esp\n"    // Align ESP on 16-byte boundary
 #endif
@@ -164,23 +165,24 @@ extern "C" {
     "movl %ebp, (%esp)\n"
     "call " ASMPREFIX "X86CompilationCallback2\n"
     "movl %ebp, %esp\n"    // Restore ESP
-    ".cfi_def_cfa_register %esp\n"
+//    ".cfi_def_cfa_register %esp\n"
     "subl $12, %esp\n"
-    ".cfi_adjust_cfa_offset 12\n"
+//    ".cfi_adjust_cfa_offset 12\n"
     "popl %ecx\n"
-    ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore %ecx\n"
+//    ".cfi_adjust_cfa_offset -4\n"
+//    ".cfi_restore %ecx\n"
     "popl %edx\n"
-    ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore %edx\n"
+//    ".cfi_adjust_cfa_offset -4\n"
+//    ".cfi_restore %edx\n"
     "popl %eax\n"
-    ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore %eax\n"
+//    ".cfi_adjust_cfa_offset -4\n"
+//    ".cfi_restore %eax\n"
     "popl %ebp\n"
-    ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore %ebp\n"
+//    ".cfi_adjust_cfa_offset -4\n"
+//    ".cfi_restore %ebp\n"
     "ret\n"
-    ".cfi_endproc\n");
+//    ".cfi_endproc\n"
+  );
 
   // Same as X86CompilationCallback but also saves XMM argument registers.
   void X86CompilationCallback_SSE(void);
@@ -189,18 +191,18 @@ extern "C" {
     ".align 8\n"
     ".globl " ASMPREFIX "X86CompilationCallback_SSE\n"
   ASMPREFIX "X86CompilationCallback_SSE:\n"
-    ".cfi_startproc\n"
+//    ".cfi_startproc\n"
     "pushl %ebp\n"
-    ".cfi_def_cfa_offset 8\n"
-    ".cfi_offset %ebp, -8\n"
+//    ".cfi_def_cfa_offset 8\n"
+//    ".cfi_offset %ebp, -8\n"
     "movl %esp, %ebp\n"    // Standard prologue
-    ".cfi_def_cfa_register %ebp\n"
+//    ".cfi_def_cfa_register %ebp\n"
     "pushl %eax\n"
-    ".cfi_rel_offset %eax, 0\n"
+//    ".cfi_rel_offset %eax, 0\n"
     "pushl %edx\n"    // Save EAX/EDX/ECX
-    ".cfi_rel_offset %edx, 4\n"
+//    ".cfi_rel_offset %edx, 4\n"
     "pushl %ecx\n"
-    ".cfi_rel_offset %ecx, 8\n"
+//    ".cfi_rel_offset %ecx, 8\n"
     "andl $-16, %esp\n"    // Align ESP on 16-byte boundary
     // Save all XMM arg registers
     "subl $64, %esp\n"
@@ -218,31 +220,32 @@ extern "C" {
     "call " ASMPREFIX "X86CompilationCallback2\n"
     "addl $16, %esp\n"
     "movaps 48(%esp), %xmm3\n"
-    ".cfi_restore %xmm3\n"
+//    ".cfi_restore %xmm3\n"
     "movaps 32(%esp), %xmm2\n"
-    ".cfi_restore %xmm2\n"
+//    ".cfi_restore %xmm2\n"
     "movaps 16(%esp), %xmm1\n"
-    ".cfi_restore %xmm1\n"
+//    ".cfi_restore %xmm1\n"
     "movaps (%esp), %xmm0\n"
-    ".cfi_restore %xmm0\n"
+//    ".cfi_restore %xmm0\n"
     "movl %ebp, %esp\n"    // Restore ESP
-    ".cfi_def_cfa_register esp\n"
+//    ".cfi_def_cfa_register esp\n"
     "subl $12, %esp\n"
-    ".cfi_adjust_cfa_offset 12\n"
+//    ".cfi_adjust_cfa_offset 12\n"
     "popl %ecx\n"
-    ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore %ecx\n"
+//    ".cfi_adjust_cfa_offset -4\n"
+//    ".cfi_restore %ecx\n"
     "popl %edx\n"
-    ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore %edx\n"
+//    ".cfi_adjust_cfa_offset -4\n"
+//    ".cfi_restore %edx\n"
     "popl %eax\n"
-    ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore %eax\n"
+//    ".cfi_adjust_cfa_offset -4\n"
+//    ".cfi_restore %eax\n"
     "popl %ebp\n"
-    ".cfi_adjust_cfa_offset -4\n"
-    ".cfi_restore %ebp\n"
+//    ".cfi_adjust_cfa_offset -4\n"
+//    ".cfi_restore %ebp\n"
     "ret\n"
-    ".cfi_endproc\n");
+//    ".cfi_endproc\n"
+  );
 #else
   void X86CompilationCallback2(void);
 
-- 
2.34.1
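
Note: the commit message proposes wrapping the directives in a CFI() macro rather than
commenting them out. A minimal sketch of what that wrapper could look like follows; the
__APPLE__ test and the exact macro spelling are assumptions for illustration, not part
of this patch.

    // Hypothetical CFI() helper: emit unwind directives only where the assembler
    // accepts them, instead of commenting them out as the patch above does.
    #if defined(__APPLE__)
    # define CFI(x)        // Darwin's assembler rejects .cfi_* directives; drop them.
    #else
    # define CFI(x) x      // Elsewhere, pass the directive string through unchanged.
    #endif

    // Usage inside the asm() blocks would then read:
    //     "pushq %rbp\n"
    //     CFI(".cfi_def_cfa_offset 16\n")
    //     CFI(".cfi_offset %rbp, -16\n")

With such a macro the unwind information stays available on platforms whose assemblers
support .cfi_* directives, and the Darwin build keeps working without any commented-out
lines to maintain.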