The bitops functions (e.g. _test_and_set_bit) on ARM do not have unwind annotations and therefore the kernel cannot backtrace out of them on a fatal error (for example, NULL pointer dereference). This patch annotates the bitops assembly macros with UNWIND annotations so that we can produce a meaningful backtrace on error. Callers of the macros are modified to pass their function name as a macro parameter, enforcing that the macros are used as standalone function implementations.

Acked-by: Dave Martin <dave.martin@linaro.org>
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
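For context, the caller change means each bitops source file now instantiates the macro with its own function name, instead of wrapping a name-less macro body in its own ENTRY/ENDPROC pair. A minimal sketch of what a caller such as arch/arm/lib/setbit.S reduces to after this patch (treat the exact file contents as illustrative):

	@ setbit.S: bitop now emits ENTRY/ENDPROC and the UNWIND
	@ annotations itself, so the caller is a single instantiation.
#include <linux/linkage.h>
#include <asm/assembler.h>
#include "bitops.h"
		.text

bitop	_set_bit, orr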
arch/arm/lib/bitops.h (95 lines, 1.9 KiB):
#include <asm/unwind.h>

#if __LINUX_ARM_ARCH__ >= 6
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3
1:	ldrex	r2, [r1]
	\instr	r2, r2, r3
	strex	r0, r2, [r1]
	cmp	r0, #0
	bne	1b
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0
	bne	1b
	smp_dmb
	cmp	r0, #0
	movne	r0, #1
2:	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#else
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31
	mov	r0, r0, lsr #5
	mov	r3, #1
	mov	r3, r3, lsl r2
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	mov	pc, lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31
	mov	r0, r0, lsr #5
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!
	mov	r0, #1
	tst	r2, r0, lsl r3
	\instr	r2, r2, r0, lsl r3
	\store	r2, [r1]
	moveq	r0, #0
	restore_irqs ip
	mov	pc, lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#endif
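As the comment above the pre-ARMv6 testop notes, the \store parameter lets each caller make the write-back conditional, skipping the store (and the cache-line dirtying) when the bit already holds the desired value. Since the tst above sets eq when the bit was clear, test-and-set only needs to store on eq, test-and-clear only on ne, and test-and-change must always store. A sketch of how the three test_and_* callers would instantiate the macro (illustrative, derived from the macro's flag behaviour):

testop	_test_and_set_bit, orreq, streq		@ bit was clear: set it and store
testop	_test_and_clear_bit, bicne, strne	@ bit was set: clear it and store
testop	_test_and_change_bit, eor, str		@ a toggle always needs the store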