Diffstat (limited to 'externals/mysql/atomic/x86-gcc.h')
-rw-r--r--  externals/mysql/atomic/x86-gcc.h  69
1 file changed, 69 insertions, 0 deletions
diff --git a/externals/mysql/atomic/x86-gcc.h b/externals/mysql/atomic/x86-gcc.h
new file mode 100644
index 00000000000..5a34bc22f9e
--- /dev/null
+++ b/externals/mysql/atomic/x86-gcc.h
@@ -0,0 +1,69 @@
+/* Copyright (C) 2006 MySQL AB
+
+   This program is free software; you can redistribute it and/or modify
+   it under the terms of the GNU General Public License as published by
+   the Free Software Foundation; version 2 of the License.
+
+   This program is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA */
+
+/*
+  XXX 64-bit atomic operations can be implemented using
+  cmpxchg8b, if necessary. Though I've heard that not all 64-bit
+  architectures support double-word (128-bit) cas.
+*/
+
+#ifdef __x86_64__
+#  ifdef MY_ATOMIC_NO_XADD
+#    define MY_ATOMIC_MODE "gcc-amd64" LOCK_prefix "-no-xadd"
+#  else
+#    define MY_ATOMIC_MODE "gcc-amd64" LOCK_prefix
+#  endif
+#else
+#  ifdef MY_ATOMIC_NO_XADD
+#    define MY_ATOMIC_MODE "gcc-x86" LOCK_prefix "-no-xadd"
+#  else
+#    define MY_ATOMIC_MODE "gcc-x86" LOCK_prefix
+#  endif
+#endif
+
+/* fix -ansi errors while maintaining readability */
+#ifndef asm
+#define asm __asm__
+#endif
+
+#ifndef MY_ATOMIC_NO_XADD
+#define make_atomic_add_body(S)                                  \
+  asm volatile (LOCK_prefix "; xadd %0, %1;" : "+r" (v) , "+m" (*a))
+#endif
+#define make_atomic_fas_body(S)                                  \
+  asm volatile ("xchg %0, %1;" : "+r" (v) , "+m" (*a))
+#define make_atomic_cas_body(S)                                  \
+  asm volatile (LOCK_prefix "; cmpxchg %3, %0; setz %2;"         \
+                : "+m" (*a), "+a" (*cmp), "=q" (ret): "r" (set))
+
+#ifdef MY_ATOMIC_MODE_DUMMY
+#define make_atomic_load_body(S)   ret=*a
+#define make_atomic_store_body(S)  *a=v
+#else
+/*
+  Actually 32-bit reads/writes are always atomic on x86
+  But we add LOCK_prefix here anyway to force memory barriers
+*/
+#define make_atomic_load_body(S)                                 \
+  ret=0;                                                         \
+  asm volatile (LOCK_prefix "; cmpxchg %2, %0"                   \
+                : "+m" (*a), "+a" (ret): "r" (ret))
+#define make_atomic_store_body(S)                                \
+  asm volatile ("; xchg %0, %1;" : "+m" (*a), "+r" (v))
+#endif
+
+/* TODO test on intel whether the below helps. on AMD it makes no difference */
+//#define LF_BACKOFF ({asm volatile ("rep; nop"); 1; })
+
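For readers unfamiliar with the cmpxchg pattern used by make_atomic_cas_body above, here is a minimal standalone sketch of what that macro body does once the surrounding my_atomic machinery supplies the names a, cmp, set and ret. Everything in it that is not in the patch is an assumption: the function name my_atomic_cas32_sketch is hypothetical, and LOCK_prefix (defined elsewhere in the my_atomic headers) is hard-coded to "lock" here so the example builds on its own with gcc on an x86/x86-64 host.

/*
  Hypothetical standalone sketch, not part of this commit.
  Assumes LOCK_prefix expands to "lock", as on SMP builds.
*/
#include <stdint.h>
#include <stdio.h>

#define LOCK_prefix "lock"

/*
  Compare *a with *cmp; if equal, store `set` into *a and return non-zero.
  Otherwise return 0 and write the current value of *a back into *cmp
  (cmpxchg leaves it in EAX, and the "+a" constraint stores EAX to *cmp).
*/
static int my_atomic_cas32_sketch(volatile int32_t *a, int32_t *cmp, int32_t set)
{
  char ret;
  __asm__ volatile (LOCK_prefix "; cmpxchg %3, %0; setz %2;"
                    : "+m" (*a), "+a" (*cmp), "=q" (ret)
                    : "r" (set));
  return ret;
}

int main(void)
{
  volatile int32_t value= 1;
  int32_t expected= 1;

  /* Succeeds: value is 1, so it is swapped to 2. */
  printf("ok=%d value=%d\n",
         my_atomic_cas32_sketch(&value, &expected, 2), (int) value);

  /* Fails: value is now 2, not 1; expected is refreshed to 2. */
  expected= 1;
  printf("ok=%d expected=%d\n",
         my_atomic_cas32_sketch(&value, &expected, 3), (int) expected);
  return 0;
}

The "+a" (*cmp) constraint is what makes retry loops cheap: on a failed compare, cmpxchg leaves the current contents of *a in EAX, and the constraint writes that back through cmp, so the caller gets a refreshed expected value without an extra load.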