diff options
author | Roland McGrath <roland@hack.frob.com> | 2014-05-14 15:32:18 -0700 |
---|---|---|
committer | Roland McGrath <roland@hack.frob.com> | 2014-05-14 15:32:18 -0700 |
commit | 039890a67818211dba8207feab3832b0fcc391d2 (patch) | |
tree | d7b545eb3c8d66e5cc0b0055c8eaf197308cc5d5 /sysdeps/unix/sysv/linux/x86/hle.h | |
parent | 65c89320e2968c55feddfbdacd09c0a6cc0b006c (diff) | |
download | glibc-039890a67818211dba8207feab3832b0fcc391d2.tar.gz glibc-039890a67818211dba8207feab3832b0fcc391d2.tar.xz glibc-039890a67818211dba8207feab3832b0fcc391d2.zip |
Move remaining files out of nptl/sysdeps/unix/sysv/linux/x86/.
Diffstat (limited to 'sysdeps/unix/sysv/linux/x86/hle.h')
-rw-r--r-- | sysdeps/unix/sysv/linux/x86/hle.h | 75 |
1 file changed, 75 insertions, 0 deletions
/* Shared RTM header.  Emulate TSX intrinsics for compilers and assemblers
   that do not support the intrinsics and instructions yet.  */
#ifndef _HLE_H
#define _HLE_H 1

#ifdef __ASSEMBLER__

/* Hand-encoded RTM instructions for assemblers without TSX support.
   XBEGIN takes the abort-handler label as its argument.  Its rel32
   displacement is measured from the end of the instruction, hence the
   local label `1:' placed immediately after the encoding.  */
.macro XBEGIN target
	.byte 0xc7,0xf8
	.long \target-1f
1:
.endm

.macro XEND
	.byte 0x0f,0x01,0xd5
.endm

.macro XABORT code
	.byte 0xc6,0xf8,\code
.endm

.macro XTEST
	.byte 0x0f,0x01,0xd6
.endm

#endif

/* Official RTM intrinsics interface matching gcc/icc, but works
   on older gcc compatible compilers and binutils.
   We should somehow detect if the compiler supports it, because
   it may be able to generate slightly better code.  */

/* Value returned by _xbegin when the transaction started successfully.  */
#define _XBEGIN_STARTED		(~0u)

/* Bits of the abort status returned by _xbegin after a transaction abort
   (see the Intel SDM, RTM abort status definition).  */
#define _XABORT_EXPLICIT	(1 << 0)	/* Aborted by _xabort.  */
#define _XABORT_RETRY		(1 << 1)	/* Retry might succeed.  */
#define _XABORT_CONFLICT	(1 << 2)	/* Memory conflict.  */
#define _XABORT_CAPACITY	(1 << 3)	/* Transaction buffer overflow.  */
#define _XABORT_DEBUG		(1 << 4)	/* Debug breakpoint hit.  */
#define _XABORT_NESTED		(1 << 5)	/* Abort in a nested transaction.  */
/* Extract the 8-bit code passed to _xabort from an abort status word.  */
#define _XABORT_CODE(x)		(((x) >> 24) & 0xff)

/* Explicit abort codes used with _xabort; recoverable via _XABORT_CODE.  */
#define _ABORT_LOCK_BUSY	0xff
#define _ABORT_LOCK_IS_LOCKED	0xfe
#define _ABORT_NESTED_TRYLOCK	0xfd

#ifndef __ASSEMBLER__

/* These wrappers must collapse to the bare instruction sequence.
   Note: __asm__ (not the `asm' extension keyword) so the header also
   compiles in strict ISO modes such as -std=c11.  */
#define __force_inline __attribute__((__always_inline__)) inline

/* Start a transaction.  Returns _XBEGIN_STARTED when the transaction is
   now active.  The encoded XBEGIN has a rel32 of 0, so the abort path is
   the next instruction: after an abort, execution resumes here with the
   abort status (_XABORT_* bits) in EAX, which is what gets returned.  */
static __force_inline int _xbegin(void)
{
  int ret = _XBEGIN_STARTED;
  __asm__ volatile (".byte 0xc7,0xf8 ; .long 0" : "+a" (ret) :: "memory");
  return ret;
}

/* Commit the innermost active transaction.  */
static __force_inline void _xend(void)
{
  __asm__ volatile (".byte 0x0f,0x01,0xd5" ::: "memory");
}

/* Abort the current transaction.  STATUS must be a compile-time
   constant ("i" constraint); it becomes the 8-bit immediate of XABORT
   and is visible via _XABORT_CODE in the status seen at _xbegin.
   %P0 prints the constant without the immediate-prefix syntax.  */
static __force_inline void _xabort(const unsigned int status)
{
  __asm__ volatile (".byte 0xc6,0xf8,%P0" :: "i" (status) : "memory");
}

/* Return nonzero when executing inside a transaction.  XTEST clears ZF
   in that case; SETNZ captures the flag into OUT.  */
static __force_inline int _xtest(void)
{
  unsigned char out;
  __asm__ volatile (".byte 0x0f,0x01,0xd6 ; setnz %0" : "=r" (out) :: "memory");
  return out;
}

#endif
#endif