/* NOTE(review): this chunk is a garbled extraction -- the original     */
/* file's line numbers are fused into the text (e.g. the "18" in        */
/* "18#if") and many lines are missing.  Code bytes are left untouched; */
/* comments describe the visible intent only.                           */
/* Prefer the compiler __atomic builtins when the compiler is new       */
/* enough (gcc >= 4.8 or clang >= 3.5) and the client did not opt out.  */
18#if (AO_GNUC_PREREQ(4, 8) || AO_CLANG_PREREQ(3, 5)) \
19 && !defined(AO_DISABLE_GCC_ATOMICS)
21# define AO_GCC_ATOMIC_TEST_AND_SET
/* Native Client sandboxing: mask pointers used in inline asm and align */
/* branch targets; on other targets AO_MASK_PTR expands to nothing      */
/* (the #else/#endif lines are missing from this extraction).           */
24#ifdef __native_client__
26# define AO_MASK_PTR(reg) " bical " reg ", " reg ", #0xc0000000\n"
27# define AO_BR_ALIGN " .align 4\n"
29# define AO_MASK_PTR(reg)
/* Thumb-1 cannot encode the instructions used below, so the asm        */
/* sequences switch to ARM mode and back, clobbering r3; on ARM mode or */
/* Thumb-2 the switch macros are empty.  The continuation lines of the  */
/* non-empty definitions are missing from this extraction.              */
33#if defined(__thumb__) && !defined(__thumb2__)
37# define AO_THUMB_GO_ARM \
44# define AO_THUMB_RESTORE_MODE \
50# define AO_THUMB_SWITCH_CLOBBERS "r3",
52# define AO_THUMB_GO_ARM
53# define AO_THUMB_RESTORE_MODE
54# define AO_THUMB_SWITCH_CLOBBERS
/* Architecture capability detection.  Advertise ldrex/strex support    */
/* unless targeting a pre-v6 or v6-M core; v7/v8-A profiles are listed  */
/* explicitly on the opt-in side of the condition.                      */
59#if !defined(__ARM_ARCH_2__) && !defined(__ARM_ARCH_3__) \
60 && !defined(__ARM_ARCH_3M__) && !defined(__ARM_ARCH_4__) \
61 && !defined(__ARM_ARCH_4T__) \
62 && ((!defined(__ARM_ARCH_5__) && !defined(__ARM_ARCH_5E__) \
63 && !defined(__ARM_ARCH_5T__) && !defined(__ARM_ARCH_5TE__) \
64 && !defined(__ARM_ARCH_5TEJ__) && !defined(__ARM_ARCH_6M__)) \
65 || defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) \
66 || defined(__ARM_ARCH_8A__))
67# define AO_ARM_HAVE_LDREX
/* Byte/halfword exclusives (ldrexb/ldrexh): excluded on the plain      */
/* ARMv6/6J/6T2 sub-variants checked here.                              */
68# if !defined(__ARM_ARCH_6__) && !defined(__ARM_ARCH_6J__) \
69 && !defined(__ARM_ARCH_6T2__)
71# define AO_ARM_HAVE_LDREXBH
73# if !defined(__ARM_ARCH_6__) && !defined(__ARM_ARCH_6J__) \
74 && !defined(__ARM_ARCH_6T2__) && !defined(__ARM_ARCH_6Z__) \
75 && !defined(__ARM_ARCH_6ZT2__)
/* Nested ARMv6 sub-variant checks (partially missing from this         */
/* extraction) gate the availability of the dmb instruction.            */
76# if !defined(__ARM_ARCH_6K__) && !defined(__ARM_ARCH_6KZ__) \
77 && !defined(__ARM_ARCH_6ZK__)
79# define AO_ARM_HAVE_DMB
/* Doubleword exclusives (ldrexd/strexd): not available from Thumb-1,   */
/* and clang accepts them only from version 3.3 on.                     */
81# if (!defined(__thumb__) \
82 || (defined(__thumb2__) && !defined(__ARM_ARCH_7__) \
83 && !defined(__ARM_ARCH_7M__) && !defined(__ARM_ARCH_7EM__))) \
84 && (!defined(__clang__) || AO_CLANG_PREREQ(3, 3))
90# define AO_ARM_HAVE_LDREXD
/* Legacy swp instruction: excluded on ARMv2, v6-M, v8-A and under      */
/* Thumb-2.                                                             */
95#if !defined(__ARM_ARCH_2__) && !defined(__ARM_ARCH_6M__) \
96 && !defined(__ARM_ARCH_8A__) && !defined(__thumb2__)
97# define AO_ARM_HAVE_SWP
/* AO_nop_write: a "dmb ishst" (store barrier, inner-shareable domain)  */
/* when real dmb support was detected, the target is not declared       */
/* uniprocessor, and builtin atomics are not preferred.  The enclosing  */
/* function header/braces are missing from this extraction.             */
102#if !defined(AO_UNIPROCESSOR) && defined(AO_ARM_HAVE_DMB) \
103 && !defined(AO_PREFER_BUILTIN_ATOMICS)
118 __asm__ __volatile__(
"dmb ishst" : : :
"memory");
120# define AO_HAVE_nop_write
123#ifndef AO_GCC_ATOMIC_TEST_AND_SET
/* AO_nop_full (full memory barrier), three variants: on a declared     */
/* uniprocessor a compiler-only barrier suffices; with dmb support a    */
/* full "dmb" is issued; otherwise, with only ldrex available, the      */
/* legacy CP15 barrier (mcr p15,0,%0,c7,c10,5) is used.  Function       */
/* headers/bodies are partially missing from this extraction.           */
125#ifdef AO_UNIPROCESSOR
133# define AO_HAVE_nop_full
135#elif defined(AO_ARM_HAVE_DMB)
144 __asm__ __volatile__(
"dmb" : : :
"memory");
146# define AO_HAVE_nop_full
148#elif defined(AO_ARM_HAVE_LDREX)
158 __asm__ __volatile__(
"@AO_nop_full\n"
160 " mcr p15,0,%0,c7,c10,5\n"
166# define AO_HAVE_nop_full
174#ifdef AO_ARM_HAVE_LDREX
/* If a context switch does not clear the exclusive monitor             */
/* (AO_BROKEN_TASKSWITCH_CLREX), skip the builtin atomic stores of      */
/* every width -- plain stores are used instead.                        */
191# ifdef AO_BROKEN_TASKSWITCH_CLREX
193# define AO_SKIPATOMIC_store
194# define AO_SKIPATOMIC_store_release
195# define AO_SKIPATOMIC_char_store
196# define AO_SKIPATOMIC_char_store_release
197# define AO_SKIPATOMIC_short_store
198# define AO_SKIPATOMIC_short_store_release
199# define AO_SKIPATOMIC_int_store
200# define AO_SKIPATOMIC_int_store_release
202# ifndef AO_PREFER_BUILTIN_ATOMICS
/* AO_store: word store via a strex retry loop (the ldrex half and the  */
/* surrounding function/loop lines are missing from this extraction).   */
208 __asm__ __volatile__(
"@AO_store\n"
214 " strex %0, %3, [%2]\n"
218 :
"=&r" (flag),
"+m" (*addr)
219 :
"r" (addr),
"r" (value)
222# define AO_HAVE_store
224# ifdef AO_ARM_HAVE_LDREXBH
/* AO_char_store: byte store via strexb, same retry-loop shape.         */
230 __asm__ __volatile__(
"@AO_char_store\n"
236 " strexb %0, %3, [%2]\n"
240 :
"=&r" (flag),
"+m" (*addr)
241 :
"r" (addr),
"r" (value)
244# define AO_HAVE_char_store
/* AO_short_store: halfword store via strexh.                           */
247 unsigned short value)
251 __asm__ __volatile__(
"@AO_short_store\n"
257 " strexh %0, %3, [%2]\n"
261 :
"=&r" (flag),
"+m" (*addr)
262 :
"r" (addr),
"r" (value)
265# define AO_HAVE_short_store
/* Without ldrex (and without the gcc atomics), fall back to the        */
/* generic atomic store implementation.                                 */
270# elif !defined(AO_GCC_ATOMIC_TEST_AND_SET)
271# include "../loadstore/atomic_store.h"
277#ifndef AO_GCC_ATOMIC_TEST_AND_SET
279# include "../test_and_set_t_is_ao_t.h"
281#ifdef AO_ARM_HAVE_LDREX
/* Enable aligned-access checking for all widths before pulling in the  */
/* plain atomic loads (presumably because the exclusive-access path     */
/* requires natural alignment -- confirm against the full file).        */
285# define AO_ACCESS_CHECK_ALIGNED
286# define AO_ACCESS_short_CHECK_ALIGNED
287# define AO_ACCESS_int_CHECK_ALIGNED
288# include "../all_atomic_only_load.h"
/* If the ldrexb/ldrexh store variants above were not defined, use the  */
/* generic char/short atomic stores.                                    */
290# ifndef AO_HAVE_char_store
291# include "../loadstore/char_atomic_store.h"
292# include "../loadstore/short_atomic_store.h"
305#ifndef AO_PREFER_GENERALIZED
306#if !defined(AO_FORCE_USE_SWP) || !defined(AO_ARM_HAVE_SWP)
/* AO_test_and_set: ldrex/strex retry loop returning the old value.     */
/* (Only fragments of each asm statement survive in this extraction;    */
/* the ldrex halves, loop branches and function bodies are missing.)    */
319 __asm__ __volatile__(
"@AO_test_and_set\n"
325 " strex %1, %4, [%3]\n"
329 :
"=&r"(oldval),
"=&r"(flag),
"+m"(*addr)
334# define AO_HAVE_test_and_set
/* AO_fetch_and_add: add arbitrary increment, return the prior value.   */
343 __asm__ __volatile__(
"@AO_fetch_and_add\n"
350 " strex %1, %2, [%5]\n"
354 :
"=&r"(
result),
"=&r"(flag),
"=&r"(
tmp),
"+m"(*p)
359#define AO_HAVE_fetch_and_add
/* AO_fetch_and_add1: specialized increment-by-one variant.             */
367 __asm__ __volatile__(
"@AO_fetch_and_add1\n"
374 " strex %2, %1, [%4]\n"
378 :
"=&r"(
result),
"=&r"(
tmp),
"=&r"(flag),
"+m"(*p)
383#define AO_HAVE_fetch_and_add1
/* AO_fetch_and_sub1: specialized decrement-by-one variant.             */
391 __asm__ __volatile__(
"@AO_fetch_and_sub1\n"
398 " strex %2, %1, [%4]\n"
402 :
"=&r"(
result),
"=&r"(
tmp),
"=&r"(flag),
"+m"(*p)
407#define AO_HAVE_fetch_and_sub1
/* AO_and / AO_or / AO_xor: bitwise read-modify-write loops; no value   */
/* is returned, so only input operands are visible in these fragments.  */
414 __asm__ __volatile__(
"@AO_and\n"
421 " strex %0, %1, [%4]\n"
426 :
"r" (value),
"r" (p)
436 __asm__ __volatile__(
"@AO_or\n"
443 " strex %0, %1, [%4]\n"
448 :
"r" (value),
"r" (p)
458 __asm__ __volatile__(
"@AO_xor\n"
465 " strex %0, %1, [%4]\n"
470 :
"r" (value),
"r" (p)
476#ifdef AO_ARM_HAVE_LDREXBH
/* AO_char_fetch_and_add: byte-wide fetch-and-add via strexb; the       */
/* increment is widened to unsigned for the asm operand and the result  */
/* is narrowed back to unsigned char on return.  (The ldrexb half and   */
/* the surrounding function lines are missing from this extraction.)    */
483 __asm__ __volatile__(
"@AO_char_fetch_and_add\n"
490 " strexb %1, %2, [%5]\n"
494 :
"=&r" (
result),
"=&r" (flag),
"=&r" (
tmp),
"+m" (*p)
495 :
"r" ((
unsigned)incr),
"r" (p)
497 return (
unsigned char)
result;
499# define AO_HAVE_char_fetch_and_add
/* AO_short_fetch_and_add: same shape for halfwords via strexh.         */
507 __asm__ __volatile__(
"@AO_short_fetch_and_add\n"
514 " strexh %1, %2, [%5]\n"
518 :
"=&r" (
result),
"=&r" (flag),
"=&r" (
tmp),
"+m" (*p)
519 :
"r" ((
unsigned)incr),
"r" (p)
521 return (
unsigned short)
result;
523# define AO_HAVE_short_fetch_and_add
526#ifndef AO_GENERALIZE_ASM_BOOL_CAS
/* AO_compare_and_swap: boolean CAS -- strexeq stores new_val only when */
/* the preceding (missing) ldrex/compare found old_val.                 */
533 __asm__ __volatile__(
"@AO_compare_and_swap\n"
546 " strexeq %0, %5, [%3]\n"
551 :
"r"(addr),
"r"(old_val),
"r"(new_val)
555# define AO_HAVE_compare_and_swap
/* AO_fetch_compare_and_swap: same conditional-store pattern but        */
/* returning the fetched prior value instead of a success flag.         */
564 __asm__ __volatile__(
"@AO_fetch_compare_and_swap\n"
575 " strexeq %0, %5, [%3]\n"
579 :
"=&r"(flag),
"=&r"(fetched_val),
"+m"(*addr)
580 :
"r"(addr),
"r"(old_val),
"r"(new_val)
584#define AO_HAVE_fetch_compare_and_swap
586#ifdef AO_ARM_HAVE_LDREXD
587# include "../standard_ao_double_t.h"
/* Double-width (64-bit) operations via ldrexd/strexd; %H0 names the    */
/* high register of an even/odd register pair.  Function bodies are     */
/* fragmentary in this extraction.                                      */
/* AO_double_load: single ldrexd gives an atomic 64-bit read.           */
601 __asm__ __volatile__(
"@AO_double_load\n"
603 " ldrexd %0, %H0, [%1]"
609# define AO_HAVE_double_load
/* AO_double_store: ldrexd/strexd retry loop writing new_val.AO_whole;  */
/* the discarded old value and the strexd status are scratch outputs.   */
619 __asm__ __volatile__(
"@AO_double_store\n"
621 " ldrexd %0, %H0, [%3]\n"
623 " strexd %1, %4, %H4, [%3]"
624 :
"=&r" (old_val.
AO_whole),
"=&r" (status),
"+m" (*addr)
625 :
"r" (addr),
"r" (new_val.
AO_whole)
629# define AO_HAVE_double_store
/* AO_double_compare_and_swap: split into two asm statements -- an      */
/* ldrexd to fetch the current value, then (after a C-level compare,    */
/* missing here) a strexd attempting to publish new_val.                */
640 __asm__ __volatile__(
"@AO_double_compare_and_swap\n"
642 " ldrexd %0, %H0, [%1]\n"
648 __asm__ __volatile__(
650 " strexd %0, %3, %H3, [%2]\n"
651 :
"=&r"(
result),
"+m"(*addr)
652 :
"r" (addr),
"r" (new_val.
AO_whole)
657# define AO_HAVE_double_compare_and_swap
667#include "../all_aligned_atomic_load_store.h"
/* Legacy fallback: if no ldrex-based test_and_set was produced above   */
/* and the deprecated swp instruction is available, implement a full    */
/* test-and-set with it.                                                */
673#if !defined(AO_HAVE_test_and_set_full) && !defined(AO_HAVE_test_and_set) \
674 && defined (AO_ARM_HAVE_SWP) && (!defined(AO_PREFER_GENERALIZED) \
675 || !defined(AO_HAVE_fetch_compare_and_swap))
688 __asm__ __volatile__(
"@AO_test_and_set_full\n"
691 " swp %0, %2, [%3]\n"
695 :
"=&r"(oldval),
"=&r"(addr)
700# define AO_HAVE_test_and_set_full
/* clang without ldrex: skip the builtin bitwise atomics (workaround    */
/* -- the explanatory comment lines are missing from this extraction).  */
707# if defined(__clang__) && !defined(AO_ARM_HAVE_LDREX)
710# define AO_SKIPATOMIC_ANY_and_ANY
711# define AO_SKIPATOMIC_ANY_or_ANY
712# define AO_SKIPATOMIC_ANY_xor_ANY
715# ifdef AO_ARM_HAVE_LDREXD
716# include "../standard_ao_double_t.h"
/* End-of-header cleanup: all capability and skip/switch macros are     */
/* file-private, so undefine them to avoid leaking into client code.    */
722#undef AO_ARM_HAVE_DMB
723#undef AO_ARM_HAVE_LDREX
724#undef AO_ARM_HAVE_LDREXBH
725#undef AO_ARM_HAVE_LDREXD
726#undef AO_ARM_HAVE_SWP
729#undef AO_SKIPATOMIC_ANY_and_ANY
730#undef AO_SKIPATOMIC_ANY_or_ANY
731#undef AO_SKIPATOMIC_ANY_xor_ANY
732#undef AO_SKIPATOMIC_char_store
733#undef AO_SKIPATOMIC_char_store_release
734#undef AO_SKIPATOMIC_int_store
735#undef AO_SKIPATOMIC_int_store_release
736#undef AO_SKIPATOMIC_short_store
737#undef AO_SKIPATOMIC_short_store_release
738#undef AO_SKIPATOMIC_store
739#undef AO_SKIPATOMIC_store_release
740#undef AO_THUMB_GO_ARM
741#undef AO_THUMB_RESTORE_MODE
742#undef AO_THUMB_SWITCH_CLOBBERS
#define AO_compiler_barrier()
#define AO_EXPECT_FALSE(expr)
AO_INLINE AO_TS_VAL_t AO_test_and_set_full(volatile AO_TS_t *addr)
AO_INLINE void AO_and(volatile AO_t *p, AO_t value)
AO_INLINE int AO_compare_and_swap(volatile AO_t *addr, AO_t old_val, AO_t new_val)
AO_INLINE unsigned short AO_short_fetch_and_add(volatile unsigned short *p, unsigned short incr)
AO_INLINE unsigned char AO_char_fetch_and_add(volatile unsigned char *p, unsigned char incr)
AO_INLINE AO_t AO_fetch_and_add(volatile AO_t *p, AO_t incr)
AO_INLINE void AO_nop_write(void)
AO_INLINE int AO_double_compare_and_swap(volatile AO_double_t *addr, AO_double_t old_val, AO_double_t new_val)
AO_INLINE AO_t AO_fetch_compare_and_swap(volatile AO_t *addr, AO_t old_val, AO_t new_val)
AO_INLINE void AO_double_store(volatile AO_double_t *addr, AO_double_t new_val)
#define AO_THUMB_RESTORE_MODE
AO_INLINE void AO_or(volatile AO_t *p, AO_t value)
AO_INLINE AO_TS_VAL_t AO_test_and_set(volatile AO_TS_t *addr)
AO_INLINE void AO_xor(volatile AO_t *p, AO_t value)
AO_INLINE AO_t AO_fetch_and_sub1(volatile AO_t *p)
#define AO_THUMB_SWITCH_CLOBBERS
AO_INLINE AO_t AO_fetch_and_add1(volatile AO_t *p)
AO_INLINE AO_double_t AO_double_load(const volatile AO_double_t *addr)
AO_INLINE void AO_nop_full(void)
AO_INLINE void AO_char_store(volatile unsigned char *addr, unsigned char value)
AO_INLINE void AO_store(volatile AO_t *addr, AO_t value)
AO_INLINE void AO_short_store(volatile unsigned short *addr, unsigned short value)
unsigned long long double_ptr_storage
double_ptr_storage AO_whole