author    David S. Miller <davem@sunset.davemloft.net>    2006-12-17 16:18:47 -0800
committer David S. Miller <davem@sunset.davemloft.net>    2006-12-17 16:18:47 -0800
commit    8a8b836b91aa170a383f2f360b73d3d23160d9d7 (patch)
tree      875a635f634a869b801c4efa8f145c5b7b7db8e4 /arch/sparc
parent    216da721b881838d639a3987bf8a825e6b4aacdd (diff)
[SPARC]: Make bitops use same spinlocks as atomics.
Recent workqueue changes basically make this a formal requirement.

Also, move atomic32.o from lib-y to obj-y since it exports symbols
to modules.

Signed-off-by: David S. Miller <davem@davemloft.net>
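For context, the locking scheme the bit helpers now share is the small hash of spinlocks that atomic32.c already uses for atomic_t operations: ATOMIC_HASH(addr) picks one of a few locks based on the operand's address, so unrelated words rarely contend on the same lock. Only the ATOMIC_HASH(addr) usage is visible in this diff; the sketch below shows the general shape of such a hash, with the table size and shift as assumptions:

	#include <linux/spinlock.h>

	/* Sketch only: the size and shift are assumptions, not taken from this commit. */
	#define ATOMIC_HASH_SIZE	4
	#define ATOMIC_HASH(a) \
		(&__atomic_hash[(((unsigned long)(a)) >> 8) & (ATOMIC_HASH_SIZE - 1)])

	/* One spinlock per hash bucket; every atomic_t/bitop falls into exactly one bucket. */
	static spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
		[0 ... (ATOMIC_HASH_SIZE - 1)] = SPIN_LOCK_UNLOCKED
	};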
Diffstat (limited to 'arch/sparc')
-rw-r--r--  arch/sparc/kernel/sparc_ksyms.c    8
-rw-r--r--  arch/sparc/kernel/time.c           5
-rw-r--r--  arch/sparc/lib/Makefile            4
-rw-r--r--  arch/sparc/lib/atomic32.c         39
-rw-r--r--  arch/sparc/lib/bitops.S          109
5 files changed, 42 insertions(+), 123 deletions(-)
diff --git a/arch/sparc/kernel/sparc_ksyms.c b/arch/sparc/kernel/sparc_ksyms.c
index 33dadd9f287..d8e008a04e2 100644
--- a/arch/sparc/kernel/sparc_ksyms.c
+++ b/arch/sparc/kernel/sparc_ksyms.c
@@ -83,9 +83,6 @@ extern int __divdi3(int, int);
/* Private functions with odd calling conventions. */
extern void ___atomic24_add(void);
extern void ___atomic24_sub(void);
-extern void ___set_bit(void);
-extern void ___clear_bit(void);
-extern void ___change_bit(void);
extern void ___rw_read_enter(void);
extern void ___rw_read_try(void);
extern void ___rw_read_exit(void);
@@ -125,11 +122,6 @@ EXPORT_SYMBOL(pfn_base);
EXPORT_SYMBOL(___atomic24_add);
EXPORT_SYMBOL(___atomic24_sub);
-/* Bit operations. */
-EXPORT_SYMBOL(___set_bit);
-EXPORT_SYMBOL(___clear_bit);
-EXPORT_SYMBOL(___change_bit);
-
/* Per-CPU information table */
EXPORT_PER_CPU_SYMBOL(__cpu_data);
diff --git a/arch/sparc/kernel/time.c b/arch/sparc/kernel/time.c
index 6c7aa51b590..2fcce000d87 100644
--- a/arch/sparc/kernel/time.c
+++ b/arch/sparc/kernel/time.c
@@ -78,7 +78,6 @@ unsigned long profile_pc(struct pt_regs *regs)
extern char __copy_user_begin[], __copy_user_end[];
extern char __atomic_begin[], __atomic_end[];
extern char __bzero_begin[], __bzero_end[];
- extern char __bitops_begin[], __bitops_end[];
unsigned long pc = regs->pc;
@@ -88,9 +87,7 @@ unsigned long profile_pc(struct pt_regs *regs)
(pc >= (unsigned long) __atomic_begin &&
pc < (unsigned long) __atomic_end) ||
(pc >= (unsigned long) __bzero_begin &&
- pc < (unsigned long) __bzero_end) ||
- (pc >= (unsigned long) __bitops_begin &&
- pc < (unsigned long) __bitops_end))
+ pc < (unsigned long) __bzero_end))
pc = regs->u_regs[UREG_RETPC];
return pc;
}
diff --git a/arch/sparc/lib/Makefile b/arch/sparc/lib/Makefile
index 5db7e1d8538..9ddc5b9ce3b 100644
--- a/arch/sparc/lib/Makefile
+++ b/arch/sparc/lib/Makefile
@@ -7,7 +7,7 @@ EXTRA_AFLAGS := -ansi -DST_DIV0=0x02
lib-y := mul.o rem.o sdiv.o udiv.o umul.o urem.o ashrdi3.o memcpy.o memset.o \
strlen.o checksum.o blockops.o memscan.o memcmp.o strncmp.o \
strncpy_from_user.o divdi3.o udivdi3.o strlen_user.o \
- copy_user.o locks.o atomic.o atomic32.o bitops.o \
+ copy_user.o locks.o atomic.o \
lshrdi3.o ashldi3.o rwsem.o muldi3.o bitext.o
-obj-y += iomap.o
+obj-y += iomap.o atomic32.o
diff --git a/arch/sparc/lib/atomic32.c b/arch/sparc/lib/atomic32.c
index de84f8534ba..53ddcd9d1e6 100644
--- a/arch/sparc/lib/atomic32.c
+++ b/arch/sparc/lib/atomic32.c
@@ -76,3 +76,42 @@ void atomic_set(atomic_t *v, int i)
spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(atomic_set);
+
+unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
+{
+ unsigned long old, flags;
+
+ spin_lock_irqsave(ATOMIC_HASH(addr), flags);
+ old = *addr;
+ *addr = old | mask;
+ spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
+
+ return old & mask;
+}
+EXPORT_SYMBOL(___set_bit);
+
+unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
+{
+ unsigned long old, flags;
+
+ spin_lock_irqsave(ATOMIC_HASH(addr), flags);
+ old = *addr;
+ *addr = old & ~mask;
+ spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
+
+ return old & mask;
+}
+EXPORT_SYMBOL(___clear_bit);
+
+unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
+{
+ unsigned long old, flags;
+
+ spin_lock_irqsave(ATOMIC_HASH(addr), flags);
+ old = *addr;
+ *addr = old ^ mask;
+ spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
+
+ return old & mask;
+}
+EXPORT_SYMBOL(___change_bit);
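The new helpers take a word-aligned address plus a mask and return the old value of the masked bits, which is exactly what test-and-modify style bit operations need. The header wrappers themselves are not part of this diff; the following is a hedged sketch of how a test_and_set_bit()-style wrapper in include/asm-sparc/bitops.h would translate a bit number into the (address, mask) pair that ___set_bit() expects:

	/* Sketch: how a test_and_set_bit()-style wrapper would call ___set_bit().
	 * The actual header implementation is not shown in this commit.
	 */
	static inline int test_and_set_bit_sketch(unsigned long nr, volatile unsigned long *addr)
	{
		unsigned long *ADDR, mask;

		ADDR = ((unsigned long *) addr) + (nr >> 5);	/* word containing bit nr */
		mask = 1 << (nr & 31);				/* bit within that word */

		return ___set_bit(ADDR, mask) != 0;		/* nonzero if the bit was already set */
	}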
diff --git a/arch/sparc/lib/bitops.S b/arch/sparc/lib/bitops.S
deleted file mode 100644
index cb7fb66a40c..00000000000
--- a/arch/sparc/lib/bitops.S
+++ /dev/null
@@ -1,109 +0,0 @@
-/* bitops.S: Low level assembler bit operations.
- *
- * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
- */
-
-#include <asm/ptrace.h>
-#include <asm/psr.h>
-
- .text
- .align 4
-
- .globl __bitops_begin
-__bitops_begin:
-
- /* Take bits in %g2 and set them in word at %g1,
- * return whether bits were set in original value
- * in %g2. %g4 holds value to restore into %o7
- * in delay slot of jmpl return, %g3 + %g5 + %g7 can be
- * used as temporaries and thus is considered clobbered
- * by all callers.
- */
- .globl ___set_bit
-___set_bit:
- rd %psr, %g3
- nop; nop; nop;
- or %g3, PSR_PIL, %g5
- wr %g5, 0x0, %psr
- nop; nop; nop
-#ifdef CONFIG_SMP
- set bitops_spinlock, %g5
-2: ldstub [%g5], %g7 ! Spin on the byte lock for SMP.
- orcc %g7, 0x0, %g0 ! Did we get it?
- bne 2b ! Nope...
-#endif
- ld [%g1], %g7
- or %g7, %g2, %g5
- and %g7, %g2, %g2
-#ifdef CONFIG_SMP
- st %g5, [%g1]
- set bitops_spinlock, %g5
- stb %g0, [%g5]
-#else
- st %g5, [%g1]
-#endif
- wr %g3, 0x0, %psr
- nop; nop; nop
- jmpl %o7, %g0
- mov %g4, %o7
-
- /* Same as above, but clears the bits from %g2 instead. */
- .globl ___clear_bit
-___clear_bit:
- rd %psr, %g3
- nop; nop; nop
- or %g3, PSR_PIL, %g5
- wr %g5, 0x0, %psr
- nop; nop; nop
-#ifdef CONFIG_SMP
- set bitops_spinlock, %g5
-2: ldstub [%g5], %g7 ! Spin on the byte lock for SMP.
- orcc %g7, 0x0, %g0 ! Did we get it?
- bne 2b ! Nope...
-#endif
- ld [%g1], %g7
- andn %g7, %g2, %g5
- and %g7, %g2, %g2
-#ifdef CONFIG_SMP
- st %g5, [%g1]
- set bitops_spinlock, %g5
- stb %g0, [%g5]
-#else
- st %g5, [%g1]
-#endif
- wr %g3, 0x0, %psr
- nop; nop; nop
- jmpl %o7, %g0
- mov %g4, %o7
-
- /* Same thing again, but this time toggles the bits from %g2. */
- .globl ___change_bit
-___change_bit:
- rd %psr, %g3
- nop; nop; nop
- or %g3, PSR_PIL, %g5
- wr %g5, 0x0, %psr
- nop; nop; nop
-#ifdef CONFIG_SMP
- set bitops_spinlock, %g5
-2: ldstub [%g5], %g7 ! Spin on the byte lock for SMP.
- orcc %g7, 0x0, %g0 ! Did we get it?
- bne 2b ! Nope...
-#endif
- ld [%g1], %g7
- xor %g7, %g2, %g5
- and %g7, %g2, %g2
-#ifdef CONFIG_SMP
- st %g5, [%g1]
- set bitops_spinlock, %g5
- stb %g0, [%g5]
-#else
- st %g5, [%g1]
-#endif
- wr %g3, 0x0, %psr
- nop; nop; nop
- jmpl %o7, %g0
- mov %g4, %o7
-
- .globl __bitops_end
-__bitops_end: