diff options
Diffstat (limited to 'arch/sparc')
 -rw-r--r--  arch/sparc/include/asm/spinlock_32.h | 20 ++++++++++----------
 -rw-r--r--  arch/sparc/include/asm/spinlock_64.h | 18 +++++++++---------
 2 files changed, 19 insertions(+), 19 deletions(-)
diff --git a/arch/sparc/include/asm/spinlock_32.h b/arch/sparc/include/asm/spinlock_32.h
index b2d8a67f727..9b0f2f53c81 100644
--- a/arch/sparc/include/asm/spinlock_32.h
+++ b/arch/sparc/include/asm/spinlock_32.h
@@ -10,12 +10,12 @@
 
 #include <asm/psr.h>
 
-#define __raw_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)
+#define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)
 
-#define __raw_spin_unlock_wait(lock) \
-	do { while (__raw_spin_is_locked(lock)) cpu_relax(); } while (0)
+#define arch_spin_unlock_wait(lock) \
+	do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
 
-static inline void __raw_spin_lock(arch_spinlock_t *lock)
+static inline void arch_spin_lock(arch_spinlock_t *lock)
 {
 	__asm__ __volatile__(
 	"\n1:\n\t"
@@ -35,7 +35,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
 	: "g2", "memory", "cc");
 }
 
-static inline int __raw_spin_trylock(arch_spinlock_t *lock)
+static inline int arch_spin_trylock(arch_spinlock_t *lock)
 {
 	unsigned int result;
 	__asm__ __volatile__("ldstub [%1], %0"
@@ -45,7 +45,7 @@ static inline int arch_spin_trylock(arch_spinlock_t *lock)
 	return (result == 0);
 }
 
-static inline void __raw_spin_unlock(arch_spinlock_t *lock)
+static inline void arch_spin_unlock(arch_spinlock_t *lock)
 {
 	__asm__ __volatile__("stb %%g0, [%0]" : : "r" (lock) : "memory");
 }
@@ -176,13 +176,13 @@ static inline int arch_read_trylock(raw_rwlock_t *rw)
 
 #define __raw_write_unlock(rw)	do { (rw)->lock = 0; } while(0)
 
-#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
+#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)
 #define __raw_read_lock_flags(rw, flags)   __raw_read_lock(rw)
 #define __raw_write_lock_flags(rw, flags)  __raw_write_lock(rw)
 
-#define _raw_spin_relax(lock)	cpu_relax()
-#define _raw_read_relax(lock)	cpu_relax()
-#define _raw_write_relax(lock)	cpu_relax()
+#define arch_spin_relax(lock)	cpu_relax()
+#define arch_read_relax(lock)	cpu_relax()
+#define arch_write_relax(lock)	cpu_relax()
 
 #define __raw_read_can_lock(rw) (!((rw)->lock & 0xff))
 #define __raw_write_can_lock(rw) (!(rw)->lock)
diff --git a/arch/sparc/include/asm/spinlock_64.h b/arch/sparc/include/asm/spinlock_64.h
index 38e16c40efc..7cf58a2fcda 100644
--- a/arch/sparc/include/asm/spinlock_64.h
+++ b/arch/sparc/include/asm/spinlock_64.h
@@ -21,13 +21,13 @@
  * the spinner sections must be pre-V9 branches.
  */
 
-#define __raw_spin_is_locked(lp)	((lp)->lock != 0)
+#define arch_spin_is_locked(lp)	((lp)->lock != 0)
 
-#define __raw_spin_unlock_wait(lp)	\
+#define arch_spin_unlock_wait(lp)	\
 	do {	rmb();			\
 	} while((lp)->lock)
 
-static inline void __raw_spin_lock(arch_spinlock_t *lock)
+static inline void arch_spin_lock(arch_spinlock_t *lock)
 {
 	unsigned long tmp;
 
@@ -46,7 +46,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
 	: "memory");
 }
 
-static inline int __raw_spin_trylock(arch_spinlock_t *lock)
+static inline int arch_spin_trylock(arch_spinlock_t *lock)
 {
 	unsigned long result;
 
@@ -59,7 +59,7 @@ static inline int arch_spin_trylock(arch_spinlock_t *lock)
 	return (result == 0UL);
 }
 
-static inline void __raw_spin_unlock(arch_spinlock_t *lock)
+static inline void arch_spin_unlock(arch_spinlock_t *lock)
 {
 	__asm__ __volatile__(
 "	stb		%%g0, [%0]"
@@ -68,7 +68,7 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
 	: "memory");
 }
 
-static inline void __raw_spin_lock_flags(arch_spinlock_t *lock, unsigned long flags)
+static inline void arch_spin_lock_flags(arch_spinlock_t *lock, unsigned long flags)
 {
 	unsigned long tmp1, tmp2;
 
@@ -222,9 +222,9 @@ static int inline arch_write_trylock(raw_rwlock_t *lock)
 #define __raw_read_can_lock(rw)		(!((rw)->lock & 0x80000000UL))
 #define __raw_write_can_lock(rw)	(!(rw)->lock)
 
-#define _raw_spin_relax(lock)	cpu_relax()
-#define _raw_read_relax(lock)	cpu_relax()
-#define _raw_write_relax(lock)	cpu_relax()
+#define arch_spin_relax(lock)	cpu_relax()
+#define arch_read_relax(lock)	cpu_relax()
+#define arch_write_relax(lock)	cpu_relax()
 
 #endif /* !(__ASSEMBLY__) */