EXPORT_SYMBOL(__write_lock);
EXPORT_SYMBOL(__write_unlock);
EXPORT_SYMBOL(__write_trylock);
+/* Out of line spin-locking implementation. */
+EXPORT_SYMBOL(_raw_spin_lock_flags);
#endif
/* Hard IRQ locking */
VIScopy.o VISbzero.o VISmemset.o VIScsum.o VIScsumcopy.o \
VIScsumcopyusr.o VISsave.o atomic.o rwlock.o bitops.o \
U3memcpy.o U3copy_from_user.o U3copy_to_user.o \
- U3copy_in_user.o mcount.o ipcsum.o rwsem.o xor.o
+ U3copy_in_user.o mcount.o ipcsum.o rwsem.o xor.o splock.o
lib-$(CONFIG_DEBUG_SPINLOCK) += debuglocks.o
lib-$(CONFIG_HAVE_DEC_LOCK) += dec_and_lock.o
--- /dev/null
+/* splock.S: Spinlock primitives too large to inline.
+ *
+ * Copyright (C) 2004 David S. Miller (davem@redhat.com)
+ */
+
+ .text
+ .align 64
+
+/* Acquire the byte spinlock at [%o0].
+ *
+ * Fast path: one ldstub attempt; on success, return after the membar.
+ * Slow path: write the caller-supplied %pil value (%o1, presumably the
+ * pre-lock interrupt level saved by the irqsave caller — so interrupts
+ * can be taken while we wait; confirm against the C-side caller) into
+ * %pil, spin with plain loads until the lock byte reads zero, then
+ * restore the entry-time %pil (saved in %g2) and retry the atomic
+ * acquire at 1:.
+ *
+ * Clobbers: %g2, %g7.  Leaf routine (retl) — no register window used.
+ */
+ .globl _raw_spin_lock_flags
+_raw_spin_lock_flags: /* %o0 = lock_ptr, %o1 = irq_flags */
+1: ldstub [%o0], %g7 ! Atomic test-and-set; old lock byte -> %g7
+ brnz,pn %g7, 2f ! Non-zero: lock was held, go wait (predict not taken)
+ membar #StoreLoad | #StoreStore ! Delay slot: order lock store before critical section
+ retl ! Fast path: lock acquired, return
+ nop ! Delay slot
+
+2: rdpr %pil, %g2 ! Save PIL
+ wrpr %o1, %pil ! Set previous PIL
+3: ldub [%o0], %g7 ! Spin on lock set
+ brnz,pt %g7, 3b ! Still held: keep spinning (predict taken)
+ membar #LoadLoad ! Delay slot: order the spin loads
+ ba,pt %xcc, 1b ! Retry lock acquire
+ wrpr %g2, %pil ! Restore PIL (delay slot, before the retry)
: "memory");
}
+extern void _raw_spin_lock_flags(spinlock_t *lock, unsigned long flags);
+
#else /* !(CONFIG_DEBUG_SPINLOCK) */
typedef struct {
#define _raw_spin_trylock(lp) _spin_trylock(lp)
#define _raw_spin_lock(lock) _do_spin_lock(lock, "spin_lock")
#define _raw_spin_unlock(lock) _do_spin_unlock(lock)
+#define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock)
#endif /* CONFIG_DEBUG_SPINLOCK */
-#define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock)
-
/* Multi-reader locks, these are much saner than the 32-bit Sparc ones... */
#ifndef CONFIG_DEBUG_SPINLOCK