aedd153f5b

			Code before the .fixup section needs to have the .insn directive.
This has no side effects on MIPS32/64 but it affects the way microMIPS
loads the address for the return label.
Fixes the following build problem:
mips-linux-gnu-ld: arch/mips/built-in.o: .fixup+0x4a0: Unsupported jump between
ISA modes; consider recompiling with interlinking enabled.
mips-linux-gnu-ld: final link failed: Bad value
Makefile:819: recipe for target 'vmlinux' failed
The fix is similar to commit 1658f914ff ("MIPS: microMIPS: Disable LL/SC and
fix linker bug.").
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
Cc: stable@vger.kernel.org
Cc: linux-mips@linux-mips.org
Patchwork: https://patchwork.linux-mips.org/patch/8117/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
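
The pattern the message describes is easiest to see in isolation. The sketch
below is a hypothetical, stand-alone version of the fault-safe load (the name
example_safe_load_word and the 32-bit .word exception-table entry are
illustrative assumptions; the real macros in the header below take the load
mnemonic as an argument and use STR(PTR) for the table entry). The detail that
matters is the .insn directive on the "2:" label: the .fixup handler jumps back
to that label, and without .insn the microMIPS assembler may treat it as a data
label, so the jump target ends up with the wrong ISA mode bit.

/* Hypothetical stand-alone sketch of the fault-safe load pattern; not kernel code. */
static inline int example_safe_load_word(const int *src, int *dst)
{
	int err, val;

	asm volatile(
		"1:	lw	%[val], 0(%[src])\n"	/* may fault */
		"	li	%[err], 0\n"
		"2:	.insn\n"			/* mark "2:" as an instruction label */
		".section .fixup, \"ax\"\n"
		"3:	li	%[err], 1\n"
		"	j	2b\n"			/* resume after the faulting load */
		".previous\n"
		".section __ex_table, \"a\"\n"
		"	.word	1b, 3b\n"		/* 32-bit sketch; the header uses STR(PTR) */
		".previous\n"
		: [val] "=&r" (val), [err] "=r" (err)
		: [src] "r" (src)
		: "memory");

	if (!err)
		*dst = val;
	return err;
}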
arch/mips/include/asm/ftrace.h (90 lines, 2.1 KiB, C)
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive for
 * more details.
 *
 * Copyright (C) 2009 DSLab, Lanzhou University, China
 * Author: Wu Zhangjin <wuzhangjin@gmail.com>
 */

#ifndef _ASM_MIPS_FTRACE_H
#define _ASM_MIPS_FTRACE_H

#ifdef CONFIG_FUNCTION_TRACER

#define MCOUNT_ADDR ((unsigned long)(_mcount))
#define MCOUNT_INSN_SIZE 4		/* sizeof mcount call */

#ifndef __ASSEMBLY__
extern void _mcount(void);
#define mcount _mcount

#define safe_load(load, src, dst, error)		\
do {							\
	asm volatile (					\
		"1: " load " %[tmp_dst], 0(%[tmp_src])\n"	\
		"   li %[tmp_err], 0\n"			\
		"2: .insn\n"				\
							\
		".section .fixup, \"ax\"\n"		\
		"3: li %[tmp_err], 1\n"			\
		"   j 2b\n"				\
		".previous\n"				\
							\
		".section\t__ex_table,\"a\"\n\t"	\
		STR(PTR) "\t1b, 3b\n\t"			\
		".previous\n"				\
							\
		: [tmp_dst] "=&r" (dst), [tmp_err] "=r" (error)\
		: [tmp_src] "r" (src)			\
		: "memory"				\
	);						\
} while (0)

#define safe_store(store, src, dst, error)	\
do {						\
	asm volatile (				\
		"1: " store " %[tmp_src], 0(%[tmp_dst])\n"\
		"   li %[tmp_err], 0\n"		\
		"2: .insn\n"			\
						\
		".section .fixup, \"ax\"\n"	\
		"3: li %[tmp_err], 1\n"		\
		"   j 2b\n"			\
		".previous\n"			\
						\
		".section\t__ex_table,\"a\"\n\t"\
		STR(PTR) "\t1b, 3b\n\t"		\
		".previous\n"			\
						\
		: [tmp_err] "=r" (error)	\
		: [tmp_dst] "r" (dst), [tmp_src] "r" (src)\
		: "memory"			\
	);					\
} while (0)

#define safe_load_code(dst, src, error) \
	safe_load(STR(lw), src, dst, error)
#define safe_store_code(src, dst, error) \
	safe_store(STR(sw), src, dst, error)

#define safe_load_stack(dst, src, error) \
	safe_load(STR(PTR_L), src, dst, error)

#define safe_store_stack(src, dst, error) \
	safe_store(STR(PTR_S), src, dst, error)


#ifdef CONFIG_DYNAMIC_FTRACE
static inline unsigned long ftrace_call_adjust(unsigned long addr)
{
	return addr;
}

struct dyn_arch_ftrace {
};

#endif /*  CONFIG_DYNAMIC_FTRACE */
#endif /* __ASSEMBLY__ */
#endif /* CONFIG_FUNCTION_TRACER */
#endif /* _ASM_MIPS_FTRACE_H */
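
As a usage note, these macros exist so that the dynamic-ftrace code can patch
mcount call sites without oopsing if the text address faults. A hedged sketch
of such a caller, modelled loosely on ftrace_modify_code() in
arch/mips/kernel/ftrace.c (the name example_modify_code and the exact details
are assumptions here, not verbatim kernel code):

/* Sketch of a caller; inside the kernel this relies on <linux/errno.h>,
 * <asm/cacheflush.h> and this header. Not verbatim kernel code.
 */
static int example_modify_code(unsigned long ip, unsigned int new_code)
{
	int faulted;

	/* Fault-safe equivalent of *(unsigned int *)ip = new_code */
	safe_store_code(new_code, ip, faulted);
	if (faulted)
		return -EFAULT;

	/* Make the patched instruction visible to the instruction cache */
	flush_icache_range(ip, ip + MCOUNT_INSN_SIZE);
	return 0;
}

The faulted flag is exactly what the .fixup path in safe_store() sets, so a bad
text address is reported as -EFAULT instead of taking the kernel down.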