struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
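/*
 * The oversized dummy struct makes the "m" (*__xg(ptr)) asm operands
 * below cover a large region around ptr, so gcc must assume the asm
 * may read or write the whole pointed-to object and cannot cache it
 * in registers across the exchange.
 */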
+extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);
+
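__invalid_xchg_size() is declared but deliberately never defined anywhere: for the valid constant sizes the optimizer removes the default branch entirely, while a caller passing an unsupported size leaves behind an unresolved symbol, turning a runtime BUG() into a link-time failure. A minimal stand-alone sketch of the same idiom (all names here are illustrative, and it needs -O1 or higher so the dead branch is actually eliminated):

/* link_check.c: size checking via a deliberately undefined extern */
extern unsigned long __invalid_size(void);	/* no definition anywhere */

static inline unsigned long load32(const void *p, int size)
{
	switch (size) {
	case 4:
		return *(const unsigned int *)p;
	default:
		return __invalid_size();	/* survives only for bad sizes */
	}
}

int main(void)
{
	unsigned int v = 42;

	return load32(&v, sizeof(v)) == 42 ? 0 : 1;
	/* load32(&v, 3) would fail to link: undefined __invalid_size */
}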
#ifndef CONFIG_RMW_INSNS
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
x = tmp;
break;
default:
- BUG();
+ tmp = __invalid_xchg_size(x, ptr, size);
+ break;
}
local_irq_restore(flags);
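The hunk above is the tail of the !CONFIG_RMW_INSNS variant, where atomicity comes from masking interrupts around a plain load/store pair, which suffices on a uniprocessor; the definition that follows is the CONFIG_RMW_INSNS replacement built on the cas instruction. A user-space sketch of the interrupt-masking shape, with hypothetical stand-ins for the kernel's local_irq_save()/local_irq_restore():

#include <stdint.h>

/* Stand-ins only: the kernel versions really mask interrupts. */
#define local_irq_save(f)	((f) = 0)
#define local_irq_restore(f)	((void)(f))

static inline unsigned long sketch_xchg(unsigned long x, volatile void *ptr,
					int size)
{
	unsigned long flags, tmp;

	local_irq_save(flags);
	switch (size) {
	case 1:
		tmp = *(volatile uint8_t *)ptr;
		*(volatile uint8_t *)ptr = x;	/* store new value */
		x = tmp;			/* return the old one */
		break;
	case 2:
		tmp = *(volatile uint16_t *)ptr;
		*(volatile uint16_t *)ptr = x;
		x = tmp;
		break;
	case 4:
		tmp = *(volatile uint32_t *)ptr;
		*(volatile uint32_t *)ptr = x;
		x = tmp;
		break;
	}
	local_irq_restore(flags);
	return x;
}

int main(void)
{
	volatile uint32_t v = 5;
	unsigned long old = sketch_xchg(7, &v, sizeof(v));

	return (old == 5 && v == 7) ? 0 : 1;
}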
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
switch (size) {
- case 1:
+ case 1:
__asm__ __volatile__
("moveb %2,%0\n\t"
"1:\n\t"
"jne 1b"
: "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
break;
- case 2:
+ case 2:
__asm__ __volatile__
("movew %2,%0\n\t"
"1:\n\t"
"jne 1b"
: "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
break;
- case 4:
+ case 4:
__asm__ __volatile__
("movel %2,%0\n\t"
"1:\n\t"
"jne 1b"
: "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
break;
+ default:
+ x = __invalid_xchg_size(x, ptr, size);
+ break;
}
return x;
}
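Each case pairs a moveX that loads the current value into the result register with a casX that stores the new value only while memory still matches that register; if another bus master got in between, cas reloads the result register and jne 1b retries. Callers normally go through the xchg() wrapper macro this header defines on top of __xchg(); a typical test-and-set use (lock_word and try_acquire() are illustrative, not from the patch):

/* test-and-set on top of xchg(): nonzero means we took the lock */
static volatile unsigned long lock_word;

static int try_acquire(void)
{
	return xchg(&lock_word, 1UL) == 0;	/* old value 0 => acquired */
}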
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+extern unsigned long __invalid_cmpxchg_size(volatile void *,
+ unsigned long, unsigned long, int);
+
/*
* Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
: "=d" (old), "=m" (*(int *)p)
: "d" (new), "0" (old), "m" (*(int *)p));
break;
+ default:
+ old = __invalid_cmpxchg_size(p, old, new, size);
+ break;
}
return old;
}
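As with xchg, a bad pointer size now shows up as an undefined reference to __invalid_cmpxchg_size() when the kernel is linked, instead of a BUG() at run time. The usual caller pattern is a retry loop through the cmpxchg() wrapper built on __cmpxchg(); a sketch (atomic_inc_u32() is illustrative):

/* lock-free increment via a cmpxchg() retry loop */
static void atomic_inc_u32(volatile unsigned int *p)
{
	unsigned int old;

	do {
		old = *p;	/* snapshot the current value */
	} while (cmpxchg(p, old, old + 1) != old);	/* retry if it moved */
}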