#endif
#endif
+#ifdef __NO_LWSYNC__
+#define LWSYNC_OPCODE "sync\n"
+#else
+#define LWSYNC_OPCODE "lwsync\n"
+#endif
+
#ifndef BITS_PER_LONG
#define BITS_PER_LONG (__SIZEOF_LONG__ * 8)
#endif
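The new LWSYNC_OPCODE macro selects the barrier opcode at preprocessing time: GCC defines __NO_LWSYNC__ on PowerPC variants that lack the lightweight lwsync instruction (for example some e500 cores), and those builds fall back to the full sync barrier. A minimal sketch of how the macro is consumed, assuming GCC-style inline asm on PowerPC; the wrapper name is illustrative and not part of the patch:

/* Sketch only: emits the selected barrier. On __NO_LWSYNC__ targets
 * this assembles to the heavier "sync"; elsewhere to "lwsync". */
static inline void lightweight_barrier(void)
{
        __asm__ __volatile__(LWSYNC_OPCODE : : : "memory");
}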
unsigned int result;
__asm__ __volatile__(
- "lwsync\n"
+ LWSYNC_OPCODE
"1:\t" "lwarx %0,0,%1\n" /* load and reserve */
"stwcx. %2,0,%1\n" /* else store conditional */
"bne- 1b\n" /* retry if lost reservation */
unsigned long result;
__asm__ __volatile__(
- "lwsync\n"
+ LWSYNC_OPCODE
"1:\t" "ldarx %0,0,%1\n" /* load and reserve */
"stdcx. %2,0,%1\n" /* else store conditional */
"bne- 1b\n" /* retry if lost reservation */
unsigned int old_val;
__asm__ __volatile__(
- "lwsync\n"
+ LWSYNC_OPCODE
"1:\t" "lwarx %0,0,%1\n" /* load and reserve */
"cmpd %0,%3\n" /* if load is not equal to */
"bne 2f\n" /* old, fail */
unsigned long old_val;
__asm__ __volatile__(
- "lwsync\n"
+ LWSYNC_OPCODE
"1:\t" "ldarx %0,0,%1\n" /* load and reserve */
"cmpd %0,%3\n" /* if load is not equal to */
"bne 2f\n" /* old, fail */
unsigned int result;
__asm__ __volatile__(
- "lwsync\n"
+ LWSYNC_OPCODE
"1:\t" "lwarx %0,0,%1\n" /* load and reserve */
"add %0,%2,%0\n" /* add val to value loaded */
"stwcx. %0,0,%1\n" /* store conditional */
unsigned long result;
__asm__ __volatile__(
- "lwsync\n"
+ LWSYNC_OPCODE
"1:\t" "ldarx %0,0,%1\n" /* load and reserve */
"add %0,%2,%0\n" /* add val to value loaded */
"stdcx. %0,0,%1\n" /* store conditional */