/* -*- mode: asm -*-
 *
 *  linux/arch/h8300/platform/h8300h/entry.S
 *
 *  Yoshinori Sato <ysato@users.sourceforge.jp>
 *  David McCullough <davidm@snapgear.com>
 *
 */

/*
 *  entry.S
 *  contains the exception/interrupt gateway
 *  and the system call entry
 */

#include <linux/sys.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>

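/*
 * CPU-variant support.  Each block below defines the same helper
 * macros so the common entry code need not care which core it runs
 * on: SHLL2/SHLR2 shift a register by two bits (the H8S has a #2
 * shift form, the H8/300H shifts one bit at a time), SAVEREGS pushes
 * er0-er3, RESTOREREGS pops er3/er2 (er1/er0 are handled separately
 * in RESTORE_ALL), and SAVEEXR/RESTOREEXR manage the EXR register,
 * which exists only on the H8S.  USERRET is the offset of the CPU
 * exception frame (CCR and return address) within the saved user
 * stack frame.
 */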
#if defined(CONFIG_CPU_H8300H)
#define USERRET 8
INTERRUPTS = 64
	.h8300h
	.macro	SHLL2 reg
	shll.l	\reg
	shll.l	\reg
	.endm
	.macro	SHLR2 reg
	shlr.l	\reg
	shlr.l	\reg
	.endm
	.macro	SAVEREGS
	mov.l	er0,@-sp
	mov.l	er1,@-sp
	mov.l	er2,@-sp
	mov.l	er3,@-sp
	.endm
	.macro	RESTOREREGS
	mov.l	@sp+,er3
	mov.l	@sp+,er2
	.endm
	.macro	SAVEEXR
	.endm
	.macro	RESTOREEXR
	.endm
#endif
#if defined(CONFIG_CPU_H8S)
#define USERRET 10
#define USEREXR 8
INTERRUPTS = 128
	.h8300s
	.macro	SHLL2 reg
	shll.l	#2,\reg
	.endm
	.macro	SHLR2 reg
	shlr.l	#2,\reg
	.endm
	.macro	SAVEREGS
	stm.l	er0-er3,@-sp
	.endm
	.macro	RESTOREREGS
	ldm.l	@sp+,er2-er3
	.endm
	.macro	SAVEEXR
	mov.w	@(USEREXR:16,er0),r1
	mov.w	r1,@(LEXR-LER3:16,sp)		/* copy EXR */
	.endm
	.macro	RESTOREEXR
	mov.w	@(LEXR-LER1:16,sp),r1		/* restore EXR */
	mov.b	r1l,r1h
	mov.w	r1,@(USEREXR:16,er0)
	.endm
#endif


/* CPU context save/restore macros. */

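/*
 * SAVE_ALL builds the pt_regs frame on the kernel stack.  Bit 4 of
 * CCR is used by this port as the kernel-mode flag: clear means the
 * exception came from user mode.  For a user-mode entry the user SP
 * is parked in sw_usp, the kernel SP is reloaded from sw_ksp, and the
 * return address/CCR are copied in from the user-stack exception
 * frame.  On exit r1 holds the entry CCR.
 */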
	.macro	SAVE_ALL
	mov.l	er0,@-sp
	stc	ccr,r0l				/* check kernel mode */
	btst	#4,r0l
	bne	5f

	/* user mode */
	mov.l	sp,@SYMBOL_NAME(sw_usp)
	mov.l	@sp,er0				/* restore saved er0 */
	orc	#0x10,ccr			/* switch kernel stack */
	mov.l	@SYMBOL_NAME(sw_ksp),sp
	sub.l	#(LRET-LORIG),sp		/* allocate the LORIG..LRET area */
	SAVEREGS
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@(USERRET:16,er0),er1		/* copy the RET addr */
	mov.l	er1,@(LRET-LER3:16,sp)
	SAVEEXR

	mov.l	@(LORIG-LER3:16,sp),er0
	mov.l	er0,@(LER0-LER3:16,sp)		/* copy ER0 */
	mov.w	e1,r1				/* e1 high byte = ccr */
	and	#0xef,r1h			/* mask the kernel-mode flag */
	bra	6f
5:
	/* kernel mode */
	mov.l	@sp,er0				/* restore saved er0 */
	subs	#2,sp				/* set dummy ccr */
	SAVEREGS
	mov.w	@(LRET-LER3:16,sp),r1		/* copy old ccr */
6:
	mov.b	r1h,r1l
	mov.b	#0,r1h
	mov.w	r1,@(LCCR-LER3:16,sp)		/* set ccr */
	mov.l	er6,@-sp			/* syscall arg #6 */
	mov.l	er5,@-sp			/* syscall arg #5 */
	mov.l	er4,@-sp			/* syscall arg #4 */
	.endm					/* r1 = ccr */

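/*
 * RESTORE_ALL undoes SAVE_ALL and returns with rte.  For a return to
 * user space it rebuilds the exception frame on the user stack
 * (er0/CCR/EXR/return address), records the kernel SP in sw_ksp,
 * clears the kernel-mode flag, and switches back to the user stack
 * before the rte.
 */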
	.macro	RESTORE_ALL
	mov.l	@sp+,er4
	mov.l	@sp+,er5
	mov.l	@sp+,er6
	RESTOREREGS
	mov.w	@(LCCR-LER1:16,sp),r0		/* check kernel mode */
	btst	#4,r0l
	bne	7f

	orc	#0x80,ccr			/* disable interrupts */
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@(LER0-LER1:16,sp),er1		/* restore ER0 */
	mov.l	er1,@er0
	RESTOREEXR
	mov.w	@(LCCR-LER1:16,sp),r1		/* rebuild the CCR:PC word */
	mov.b	r1l,r1h
	mov.b	@(LRET+1-LER1:16,sp),r1l
	mov.w	r1,e1
	mov.w	@(LRET+2-LER1:16,sp),r1
	mov.l	er1,@(USERRET:16,er0)		/* copy the RET addr back */

	mov.l	@sp+,er1
	add.l	#(LRET-LER1),sp			/* skip the rest of the frame */
	mov.l	sp,@SYMBOL_NAME(sw_ksp)
	andc	#0xef,ccr			/* switch to user mode */
	mov.l	er0,sp
	bra	8f
7:
	mov.l	@sp+,er1
	adds	#4,sp
	adds	#2,sp
8:
	mov.l	@sp+,er0
	adds	#4,sp				/* remove the sw created LVEC */
	rte
	.endm

.globl SYMBOL_NAME(system_call)
.globl SYMBOL_NAME(ret_from_exception)
.globl SYMBOL_NAME(ret_from_fork)
.globl SYMBOL_NAME(ret_from_interrupt)
.globl SYMBOL_NAME(interrupt_redirect_table)
.globl SYMBOL_NAME(sw_ksp),SYMBOL_NAME(sw_usp)
.globl SYMBOL_NAME(resume)
.globl SYMBOL_NAME(interrupt_entry)
.globl SYMBOL_NAME(trace_break)

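/*
 * Interrupt redirect table.  With a ROM kernel the table is assembled
 * into the .int_redirect section, one 4-byte jsr/jmp stub (or empty
 * .long 0 slot) per vector: NMI, TRAPA #0 (system call), TRAPA #3
 * (breakpoint) and the remaining INTERRUPTS-12 sources.  With a RAM
 * kernel only a 4-byte pointer is reserved in .bss; it presumably
 * holds the base of the table set up by the boot code, and
 * interrupt_entry reads it when computing the vector number.
 */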
#if defined(CONFIG_ROMKERNEL)
	.section .int_redirect,"ax"
SYMBOL_NAME_LABEL(interrupt_redirect_table)
#if defined(CONFIG_CPU_H8300H)
	.rept	7
	.long	0
	.endr
#endif
#if defined(CONFIG_CPU_H8S)
	.rept	5
	.long	0
	.endr
	jmp	@SYMBOL_NAME(trace_break)
	.long	0
#endif

	jsr	@SYMBOL_NAME(interrupt_entry)	/* NMI */
	jmp	@SYMBOL_NAME(system_call)	/* TRAPA #0 (System call) */
	.long	0
	.long	0
	jmp	@SYMBOL_NAME(trace_break)	/* TRAPA #3 (breakpoint) */
	.rept	INTERRUPTS-12
	jsr	@SYMBOL_NAME(interrupt_entry)
	.endr
#endif
#if defined(CONFIG_RAMKERNEL)
.globl SYMBOL_NAME(interrupt_redirect_table)
	.section .bss
SYMBOL_NAME_LABEL(interrupt_redirect_table)
	.space	4
#endif

	.section .text
	.align	2
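/*
 * Common interrupt gateway.  Each redirect-table slot is a 4-byte
 * "jsr @interrupt_entry", so the return address saved in the LVEC
 * slot points just past the slot that fired.  Subtracting the table
 * base and dividing by 4 (SHLR2) therefore gives the slot index plus
 * one; the dec.l corrects for that, leaving the vector number in er0
 * for do_IRQ.
 */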
SYMBOL_NAME_LABEL(interrupt_entry)
	SAVE_ALL
	mov.l	sp,er0
	add.l	#LVEC,er0
	btst	#4,r1l
	bne	1f
	/* user LVEC */
	mov.l	@SYMBOL_NAME(sw_usp),er0
	adds	#4,er0
1:
	mov.l	@er0,er0			/* LVEC address */
#if defined(CONFIG_ROMKERNEL)
	sub.l	#SYMBOL_NAME(interrupt_redirect_table),er0
#endif
#if defined(CONFIG_RAMKERNEL)
	mov.l	@SYMBOL_NAME(interrupt_redirect_table),er1
	sub.l	er1,er0
#endif
	SHLR2	er0
	dec.l	#1,er0
	mov.l	sp,er1
	subs	#4,er1				/* adjust ret_pc */
	jsr	@SYMBOL_NAME(do_IRQ)
	jmp	@SYMBOL_NAME(ret_from_interrupt)

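/*
 * System call gateway (TRAPA #0).  The syscall number arrives in er0
 * and arguments #1-#3 in er1-er3; SAVE_ALL has already stacked
 * er4-er6 as arguments #4-#6.  Kernel stacks appear to be 8KiB
 * aligned, so masking the low 13 bits of sp (and.w #0xe000) yields
 * the current thread_info pointer for the syscall-trace checks.
 */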
SYMBOL_NAME_LABEL(system_call)
	subs	#4,sp				/* dummy LVEC */
	SAVE_ALL
	andc	#0x7f,ccr			/* enable interrupts */
	mov.l	er0,er4

	/* save top of frame */
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	mov.l	sp,er2
	and.w	#0xe000,r2			/* er2 <- current thread info */
	mov.b	@((TI_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
	btst	#(TIF_SYSCALL_TRACE & 7),r2l
	beq	1f
	jsr	@SYMBOL_NAME(do_syscall_trace)
1:
	cmp.l	#NR_syscalls,er4
	bcc	badsys
	SHLL2	er4
	mov.l	#SYMBOL_NAME(sys_call_table),er0
	add.l	er4,er0
	mov.l	@er0,er4
	beq	SYMBOL_NAME(ret_from_exception):16
	mov.l	@(LER1:16,sp),er0
	mov.l	@(LER2:16,sp),er1
	mov.l	@(LER3:16,sp),er2
	jsr	@er4
	mov.l	er0,@(LER0:16,sp)		/* save the return value */
	mov.l	sp,er2
	and.w	#0xe000,r2			/* er2 <- current thread info */
	mov.b	@((TI_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
	btst	#(TIF_SYSCALL_TRACE & 7),r2l
	beq	2f
	jsr	@SYMBOL_NAME(do_syscall_trace)
2:
#if defined(CONFIG_SYSCALL_PRINT)
	jsr	@SYMBOL_NAME(syscall_print)
#endif
	orc	#0x80,ccr			/* disable interrupts */
	bra	resume_userspace

badsys:
	mov.l	#-ENOSYS,er0
	mov.l	er0,@(LER0:16,sp)
	bra	resume_userspace

#if !defined(CONFIG_PREEMPT)
#define resume_kernel restore_all
#endif

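/*
 * Return paths.  ret_from_exception/ret_from_interrupt check the
 * saved CCR to see whether the trap came from kernel mode; user
 * returns loop through resume_userspace until no work flags
 * (reschedule, signal delivery) remain in thread_info->flags.
 */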
SYMBOL_NAME_LABEL(ret_from_exception)
#if defined(CONFIG_PREEMPT)
	orc	#0x80,ccr			/* disable interrupts */
#endif
SYMBOL_NAME_LABEL(ret_from_interrupt)
	mov.b	@(LCCR+1:16,sp),r0l
	btst	#4,r0l
	bne	resume_kernel:8		/* return from kernel */
resume_userspace:
	andc	#0x7f,ccr		/* enable interrupts */
	mov.l	sp,er4
	and.w	#0xe000,r4		/* er4 <- current thread info */
	mov.l	@(TI_FLAGS:16,er4),er1
	and.l	#_TIF_WORK_MASK,er1
	beq	restore_all:8
work_pending:
	btst	#TIF_NEED_RESCHED,r1l
	bne	work_resched:8
	/* work notifysig */
	mov.l	sp,er0
	subs	#4,er0			/* er0: pt_regs */
	jsr	@SYMBOL_NAME(do_notify_resume)
	bra	restore_all:8
work_resched:
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	jsr	@SYMBOL_NAME(schedule)
	bra	resume_userspace:8
restore_all:
	RESTORE_ALL			/* Does RTE */

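/*
 * Kernel preemption (CONFIG_PREEMPT only): a kernel-mode return may
 * reschedule if preempt_count is zero, TIF_NEED_RESCHED is set, and
 * interrupts were enabled in the interrupted context (the saved CCR's
 * I bit, tested with bmi, is clear).
 */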
#if defined(CONFIG_PREEMPT)
resume_kernel:
	mov.l	@(TI_PRE_COUNT:16,er4),er0
	bne	restore_all:8
need_resched:
	mov.l	@(TI_FLAGS:16,er4),er0
	btst	#TIF_NEED_RESCHED,r0l
	beq	restore_all:8
	mov.b	@(LCCR+1:16,sp),r0l	/* Interrupt Enabled? */
	bmi	restore_all:8
	mov.l	#PREEMPT_ACTIVE,er0
	mov.l	er0,@(TI_PRE_COUNT:16,er4)
	andc	#0x7f,ccr		/* enable interrupts */
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	jsr	@SYMBOL_NAME(schedule)
	orc	#0x80,ccr		/* disable interrupts */
	bra	need_resched:8
#endif

SYMBOL_NAME_LABEL(ret_from_fork)
	mov.l	er2,er0			/* er2: prev task (from switch_to) */
	jsr	@SYMBOL_NAME(schedule_tail)
	jmp	@SYMBOL_NAME(ret_from_exception)

SYMBOL_NAME_LABEL(resume)
	/*
	 * Beware - when entering resume, prev (the current task's
	 * thread_struct) is in er0 and next (the new task's
	 * thread_struct) is in er1, so don't change these registers
	 * until their contents are no longer needed.
	 */

	/* save ccr */
	sub.w	r3,r3
	stc	ccr,r3l
	mov.w	r3,@(THREAD_CCR+2:16,er0)

	/* disable interrupts */
	orc	#0x80,ccr
	mov.l	@SYMBOL_NAME(sw_usp),er3
	mov.l	er3,@(THREAD_USP:16,er0)
	mov.l	sp,@(THREAD_KSP:16,er0)

	/* Skip address space switching if they are the same. */
	/* FIXME: what did we hack out of here, this does nothing! */

	mov.l	@(THREAD_USP:16,er1),er0
	mov.l	er0,@SYMBOL_NAME(sw_usp)
	mov.l	@(THREAD_KSP:16,er1),sp

	/* restore status register */
	mov.w	@(THREAD_CCR+2:16,er1),r3

	ldc	r3l,ccr
	rts

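/*
 * Breakpoint trap (TRAPA #3).  LORIG is set to -1 so the frame is not
 * mistaken for an interrupted system call.  0x5730 is the opcode of
 * "trapa #3": when the word preceding the address picked up from the
 * saved user context is not a real breakpoint instruction, the
 * address is backed up by 2 (the single-step fixup, it appears)
 * before the 24-bit target is extracted and handed to trace_trap.
 */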
SYMBOL_NAME_LABEL(trace_break)
	subs	#4,sp				/* dummy LVEC */
	SAVE_ALL
	sub.l	er1,er1
	dec.l	#1,er1
	mov.l	er1,@(LORIG,sp)			/* ORIG = -1: not a syscall */
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@er0,er1
	mov.w	@(-2:16,er1),r2
	cmp.w	#0x5730,r2			/* trapa #3 opcode? */
	beq	1f
	subs	#2,er1
	mov.l	er1,@er0
1:
	and.w	#0xff,e1			/* mask to a 24-bit address */
	mov.l	er1,er0
	jsr	@SYMBOL_NAME(trace_trap)
	jmp	@SYMBOL_NAME(ret_from_exception)

	.section	.bss
SYMBOL_NAME_LABEL(sw_ksp)
	.space	4
SYMBOL_NAME_LABEL(sw_usp)
	.space	4

	.end