From 7be2249733d3fff0d6c7c84937360b3268313223 Mon Sep 17 00:00:00 2001
From: Ulrich Drepper <drepper@gmail.com>
Date: Sat, 20 Aug 2011 08:56:30 -0400
Subject: Fix CFI info in x86-64 trampolines for non-AVX code

(cherry picked from commit c88f17668b67d22fe470933ab81119de587ee175)
---
 ChangeLog                      |  8 ++++++++
 sysdeps/x86_64/dl-trampoline.S |  5 +++--
 sysdeps/x86_64/dl-trampoline.h | 25 ++++++++++++++++---------
 3 files changed, 27 insertions(+), 11 deletions(-)

diff --git a/ChangeLog b/ChangeLog
index df81b4da9a..7250f068bc 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,11 @@
+2011-08-20  Ulrich Drepper  <drepper@gmail.com>
+
+	* sysdeps/x86_64/dl-trampoline.h: If MORE_CODE is defined, restore
+	the CFI state in the end.
+	* sysdeps/x86_64/dl-trampoline.S: Define MORE_CODE before first
+	inclusion of dl-trampoline.h.
+	Based on a patch by Jiri Olsa <jolsa@redhat.com>.
+
 2011-08-14  Roland McGrath  <roland@hack.frob.com>
 
 	* locale/Makefile (locale-CPPFLAGS): Renamed CPPFLAGS-locale-programs.
diff --git a/sysdeps/x86_64/dl-trampoline.S b/sysdeps/x86_64/dl-trampoline.S
index 45a2dc20c8..317610c12e 100644
--- a/sysdeps/x86_64/dl-trampoline.S
+++ b/sysdeps/x86_64/dl-trampoline.S
@@ -158,14 +158,15 @@ L(have_avx):
 1:	js	L(no_avx)
 
 #  define RESTORE_AVX
+#  define MORE_CODE
 #  include "dl-trampoline.h"
 
 	.align 16
 L(no_avx):
 # endif
 
-# undef RESTORE_AVX
-# include "dl-trampoline.h"
+# undef RESTORE_AVX
+# include "dl-trampoline.h"
 
 	cfi_endproc
 	.size _dl_runtime_profile, .-_dl_runtime_profile
diff --git a/sysdeps/x86_64/dl-trampoline.h b/sysdeps/x86_64/dl-trampoline.h
index 5d49ed4408..1c39579830 100644
--- a/sysdeps/x86_64/dl-trampoline.h
+++ b/sysdeps/x86_64/dl-trampoline.h
@@ -1,6 +1,6 @@
 /* Partial PLT profile trampoline to save and restore x86-64 vector
    registers.
-   Copyright (C) 2009 Free Software Foundation, Inc.
+   Copyright (C) 2009, 2011 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
 
    The GNU C Library is free software; you can redistribute it and/or
@@ -195,14 +195,14 @@
 	   _dl_call_pltexit.  The La_x86_64_regs is being pointed by rsp now,
 	   so we just need to allocate the sizeof(La_x86_64_retval) space on
 	   the stack, since the alignment has already been taken care of. */
-# ifdef RESTORE_AVX
+#ifdef RESTORE_AVX
 	/* sizeof(La_x86_64_retval).  Need extra space for 2 SSE
 	   registers to detect if xmm0/xmm1 registers are changed
 	   by audit module.  */
 	subq $(LRV_SIZE + XMM_SIZE*2), %rsp
-# else
+#else
 	subq $LRV_SIZE, %rsp	# sizeof(La_x86_64_retval)
-# endif
+#endif
 	movq %rsp, %rcx		# La_x86_64_retval argument to %rcx.
 
 	/* Fill in the La_x86_64_retval structure.  */
@@ -212,7 +212,7 @@
 	movaps %xmm0, LRV_XMM0_OFFSET(%rcx)
 	movaps %xmm1, LRV_XMM1_OFFSET(%rcx)
 
-# ifdef RESTORE_AVX
+#ifdef RESTORE_AVX
 	/* This is to support AVX audit modules.  */
 	vmovdqu %ymm0, LRV_VECTOR0_OFFSET(%rcx)
 	vmovdqu %ymm1, LRV_VECTOR1_OFFSET(%rcx)
@@ -221,14 +221,14 @@
 	   by audit module.  */
 	vmovdqa %xmm0, (LRV_SIZE)(%rcx)
 	vmovdqa %xmm1, (LRV_SIZE + XMM_SIZE)(%rcx)
-# endif
+#endif
 
 	fstpt LRV_ST0_OFFSET(%rcx)
 	fstpt LRV_ST1_OFFSET(%rcx)
 
 	movq 24(%rbx), %rdx	# La_x86_64_regs argument to %rdx.
 	movq 40(%rbx), %rsi	# Copy args pushed by PLT in register.
-	movq 32(%rbx), %rdi	# %rdi: link_map, %rsi: reloc_index
+	movq 32(%rbx), %rdi	# %rdi: link_map, %rsi: reloc_index
 	call _dl_call_pltexit
 
 	/* Restore return registers.  */
@@ -238,7 +238,7 @@
 	movaps LRV_XMM0_OFFSET(%rsp), %xmm0
 	movaps LRV_XMM1_OFFSET(%rsp), %xmm1
 
-# ifdef RESTORE_AVX
+#ifdef RESTORE_AVX
 	/* Check if xmm0/xmm1 registers are changed by audit module.  */
 	vpcmpeqq (LRV_SIZE)(%rsp), %xmm0, %xmm2
 	vpmovmskb %xmm2, %esi
@@ -253,7 +253,7 @@
 	vmovdqu LRV_VECTOR1_OFFSET(%rsp), %ymm1
 
 1:
-# endif
+#endif
 
 	fldt LRV_ST1_OFFSET(%rsp)
 	fldt LRV_ST0_OFFSET(%rsp)
@@ -267,3 +267,10 @@
 				# (eats the reloc index and link_map)
 	cfi_adjust_cfa_offset(-48)
 	retq
+
+#ifdef MORE_CODE
+	cfi_adjust_cfa_offset(48)
+	cfi_rel_offset(%rbx, 0)
+	cfi_def_cfa_register(%rbx)
+# undef MORE_CODE
+#endif
-- 
cgit 1.4.1