/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This file contains arm architecture specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>

#define IOMEM(x)	(x)

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define pull            lsr
#define push            lsl
#define get_byte_0      lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0      lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define pull            lsl
#define push            lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3      lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3      lsl #0
#endif
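
/*
 * Illustrative example (not part of the original header): these names are
 * used as shift operands so byte handling reads the same on either
 * endianness, e.g.
 *	mov	r0, r2, get_byte_1
 *	strb	r0, [r3]
 * stores the byte at offset 1 of the word in r2, since strb only uses
 * the low 8 bits of r0.
 */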
					
						
/*
 * Data preload for architectures that support it
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif
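
/*
 * Illustrative example (not part of the original header): a copy loop can
 * prefetch ahead with
 *	PLD(	pld	[r1, #32]	)
 * and the wrapper makes the instruction vanish on pre-ARMv5 builds.
 */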
					
						
/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this a worthwhile thing to do when the cache is not
 * set to write-allocate (this would need further testing on XScale when WA
 * is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif
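
/*
 * Illustrative example (not part of the original header): the wrapper is
 * meant for optional alignment preamble such as
 *	CALGN(	ands	ip, r0, #31	)
 *	CALGN(	rsb	r3, ip, #32	)
 * which assembles to nothing unless CONFIG_CPU_FEROCEON is set.
 */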
					
						
/*
 * Enable and disable interrupts
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif

	.macro asm_trace_hardirqs_off
#if defined(CONFIG_TRACE_IRQFLAGS)
	stmdb   sp!, {r0-r3, ip, lr}
	bl	trace_hardirqs_off
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on_cond, cond
#if defined(CONFIG_TRACE_IRQFLAGS)
	/*
	 * actually the registers should be pushed and pop'd conditionally, but
	 * after bl the flags are certainly clobbered
	 */
	stmdb   sp!, {r0-r3, ip, lr}
	bl\cond	trace_hardirqs_on
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on
	asm_trace_hardirqs_on_cond al
	.endm

	.macro disable_irq
	disable_irq_notrace
	asm_trace_hardirqs_off
	.endm

	.macro enable_irq
	asm_trace_hardirqs_on
	enable_irq_notrace
	.endm
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq
	.endm

	.macro	save_and_disable_irqs_notrace, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq_notrace
	.endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
	msr	cpsr_c, \oldcpsr
	.endm

	.macro restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT
	asm_trace_hardirqs_on_cond eq
	restore_irqs_notrace \oldcpsr
	.endm
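
/*
 * Illustrative usage (not part of the original header): a critical section
 * that preserves the caller's IRQ state could be written as
 *	save_and_disable_irqs r9
 *	...			@ IRQs masked here
 *	restore_irqs r9
 * with any free register in place of r9.
 */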
					
						
#define USER(x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";		\
	.align	3;				\
	.long	9999b,9001f;			\
	.popsection
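
/*
 * Illustrative usage (not part of the original header): wrapping a
 * userspace access, e.g.
 *	USER(	strt	r3, [r0], #4	)
 * records the instruction in the exception table; a local "9001:" label
 * must provide the fixup code run when the access faults.
 */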
					
						
#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
					
						
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr							;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)					\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset					;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif
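
/*
 * Illustrative usage (not part of the original header): ALT_SMP() and
 * ALT_UP() are used in pairs, as in the smp_dmb macro below, e.g.
 *	ALT_SMP(dmb)
 *	ALT_UP(nop)
 * so the SMP instruction can be patched to the UP one when an SMP kernel
 * boots on a uniprocessor system.
 */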
					
						
/*
 * Instruction barrier
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4
#endif
	.endm
					
						
/*
 * SMP data memory barrier
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb)
	.else
	ALT_SMP(W(dmb))
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm
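
/*
 * Usage note (not part of the original header): "smp_dmb arm" assumes
 * 4-byte ARM encodings (for code that must assemble as ARM, e.g. the
 * kuser helpers), while plain "smp_dmb" wraps the barrier in W() so the
 * ALT_SMP() slot is still 4 bytes in a Thumb-2 kernel.
 */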
					
						
#ifdef CONFIG_THUMB2_KERNEL
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif

/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time.
 * You cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6
	mrs	\reg , cpsr
	eor	\reg, \reg, #HYP_MODE
	tst	\reg, #MODE_MASK
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	bne	1f
	orr	\reg, \reg, #PSR_A_BIT
	adr	lr, BSYM(2f)
	msr	spsr_cxsf, \reg
	__MSR_ELR_HYP(14)
	__ERET
1:	msr	cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm
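
/*
 * Illustrative usage (not part of the original header): early boot code
 * can force the CPU out of HYP mode with
 *	safe_svcmode_maskall r9
 * where r9 is simply a register the caller can spare as scratch.
 */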
					
						
/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 */
#ifdef CONFIG_THUMB2_KERNEL

	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\cond\()\t\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort
	@ explicit IT instruction needed because of the label
	@ introduced by the USER macro
	.ifnc	\cond,al
	.if	\rept == 1
	itt	\cond
	.elseif	\rept == 2
	ittt	\cond
	.else
	.error	"Unsupported rept macro argument"
	.endif
	.endif

	@ Slightly optimised to avoid incrementing the pointer twice
	usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
	.if	\rept == 2
	usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
	.endif

	add\cond \ptr, #\rept * \inc
	.endm

#else	/* !CONFIG_THUMB2_KERNEL */

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\cond\()\t \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */

	.macro	strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	str, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

	.macro	ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	ldr, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm
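
/*
 * Illustrative usage (not part of the original header): a user-copy loop
 * might read and write one word at a time with
 *	ldrusr	r3, r1, 4
 *	strusr	r3, r0, 4, abort=9001f
 * i.e. unprivileged accesses with post-increment and an exception-table
 * entry pointing at the abort label.
 */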
					
						
/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm
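
/*
 * Illustrative usage (not part of the original header):
 *	string	example_name, "hello"
 * emits a NUL-terminated "hello" with the symbol's type and size set;
 * "example_name" is a made-up label.
 */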
					
						
	.macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
	adds	\tmp, \addr, #\size - 1
	sbcccs	\tmp, \tmp, \limit
	bcs	\bad
#endif
	.endm
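
/*
 * Illustrative usage (not part of the original header):
 *	check_uaccess r0, 4, r1, r2, 9001f
 * checks a 4-byte access at r0 against the limit in r1, using r2 as
 * scratch and branching to the fixup label when the range is out of
 * bounds; the register and label choices here are examples only.
 */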
					
						

#endif /* __ASM_ASSEMBLER_H__ */