@@ -34,13 +34,13 @@ extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;
3434/* Can't use raw_spin_lock_irq because of #include problems, so
3535 * this is the substitute */
3636#define _atomic_spin_lock_irqsave(l,f) do {	\
37- arch_spinlock_t *s = ATOMIC_HASH(l); \
37+ arch_spinlock_t *s = ATOMIC_HASH(l); \
3838 local_irq_save(f); \
3939 arch_spin_lock(s); \
4040} while(0)
4141
4242#define _atomic_spin_unlock_irqrestore(l,f) do {	\
43- arch_spinlock_t *s = ATOMIC_HASH(l); \
43+ arch_spinlock_t *s = ATOMIC_HASH(l); \
4444 arch_spin_unlock(s); \
4545 local_irq_restore(f); \
4646} while(0)
@@ -85,7 +85,7 @@ static __inline__ void atomic_##op(int i, atomic_t *v) \
8585 _atomic_spin_lock_irqsave(v, flags); \
8686 v->counter c_op i; \
8787 _atomic_spin_unlock_irqrestore(v, flags); \
88- } \
88+ }
8989
9090#define ATOMIC_OP_RETURN(op, c_op)					\
9191static __inline__ int atomic_##op##_return(int i, atomic_t *v) \
@@ -150,7 +150,7 @@ static __inline__ void atomic64_##op(s64 i, atomic64_t *v) \
150150 _atomic_spin_lock_irqsave(v, flags); \
151151 v->counter c_op i; \
152152 _atomic_spin_unlock_irqrestore(v, flags); \
153- } \
153+ }
154154
155155#define ATOMIC64_OP_RETURN(op, c_op)				\
156156static __inline__ s64 atomic64_##op##_return(s64 i, atomic64_t *v) \
0 commit comments