Sparc32 already supported atomic64_dec_if_positive as a consequence of using the generic atomic64 implementation, and the sparc64 implementation is rather trivial. This allows us to set ARCH_HAS_ATOMIC64_DEC_IF_POSITIVE for all of sparc and avoid the annoying warning from lib/atomic64_test.c.

Signed-off-by: David S. Miller <davem@davemloft.net>
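For context, atomic64_dec_if_positive decrements the counter only when the old value is positive, and returns the decremented value either way, so a negative return tells the caller that nothing was stored. The new atomic64_dec_if_positive routine at the end of the listing below implements exactly that with a casx retry loop. Purely as an illustration (a userspace C11 sketch, not the kernel's code or API), the same semantics can be written as a compare-and-swap loop:

#include <stdatomic.h>
#include <stdint.h>

/* Illustrative userspace sketch, not the kernel implementation. */
static int64_t dec_if_positive(_Atomic int64_t *v)
{
	int64_t old = atomic_load(v);

	for (;;) {
		int64_t new = old - 1;

		if (old <= 0)
			return new;	/* old <= 0: return old - 1 without storing (the brlez path) */
		if (atomic_compare_exchange_weak(v, &old, new))
			return new;	/* stored old - 1, return it (the "sub %g1, 1, %o0" return path) */
		/* CAS failed: 'old' was refreshed by the builtin, retry (the bne 1b loop). */
	}
}

The generic lib/atomic64.c code that sparc32 picks up provides the same contract under a spinlock, which is why only the sparc64 assembly side needed to be added.
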
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
ENTRY(atomic_add) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_add)

ENTRY(atomic_sub) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_sub)

ENTRY(atomic_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 add	%g1, %o0, %g1
	retl
	 sra	%g1, 0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_add_ret)

ENTRY(atomic_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 sub	%g1, %o0, %g1
	retl
	 sra	%g1, 0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_sub_ret)

ENTRY(atomic64_add) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_add)

ENTRY(atomic64_sub) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_sub)

ENTRY(atomic64_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 add	%g1, %o0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_add_ret)

ENTRY(atomic64_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 sub	%g1, %o0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_sub_ret)

ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)
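For readers who find the cas/casx idiom hard to follow, every routine in this file has the same shape: load the old value into %g1, compute the new value into %g7, attempt cas (32-bit) or casx (64-bit), and retry when another CPU changed the word in the meantime; on SMP builds BACKOFF_LABEL diverts the retry through the BACKOFF_SPIN delay from asm/backoff.h first. As a hedged illustration only (C11 atomics in userspace, not the kernel's API), the add-and-return variant maps roughly to:

#include <stdatomic.h>
#include <stdint.h>

/* Illustrative sketch of atomic_add_ret's retry loop, not kernel code. */
static int sketch_atomic_add_ret(int inc, _Atomic int *ptr)
{
	int old, new;

	do {
		old = atomic_load(ptr);	/* lduw [%o1], %g1 */
		new = old + inc;	/* add  %g1, %o0, %g7 */
		/* cas [%o1], %g1, %g7 publishes 'new' only if *ptr still holds 'old';
		 * cmp/bne retries when it does not, with backoff under contention. */
	} while (!atomic_compare_exchange_weak(ptr, &old, new));

	return new;	/* add %g1, %o0, %g1 ; sra %g1, 0, %o0 sign-extends the 32-bit result */
}

The non-returning variants (atomic_add, atomic_sub, atomic64_add, atomic64_sub) use the same loop but simply retl with a nop instead of materialising the result, which is the distinction the comment at the top of the file draws between the two sets of routines.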