TEST_UNSUPPORTED("msr cpsr_f, lr")
TEST_UNSUPPORTED("msr spsr, r0")
+#if __LINUX_ARM_ARCH__ >= 5 || \
+ (__LINUX_ARM_ARCH__ == 4 && !defined(CONFIG_CPU_32v4))
TEST_BF_R("bx r",0,2f,"")
TEST_BB_R("bx r",7,2f,"")
TEST_BF_R("bxeq r",14,2f,"")
+#endif
#if __LINUX_ARM_ARCH__ >= 5
TEST_R("clz r0, r",0, 0x0,"")
#ifndef CONFIG_THUMB2_KERNEL
+#define RET(reg) "mov pc, "#reg
+
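/*
 * Editorial note, not from the patch: "mov pc, <reg>" is the traditional
 * ARM return and is valid on every architecture version, including plain
 * ARMv4 cores that lack BX. A non-Thumb-2 kernel never needs to switch
 * into Thumb state on these returns, so nothing is lost by avoiding BX.
 */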
long arm_func(long r0, long r1);

static void __used __naked __arm_kprobes_test_func(void)
".type arm_func, %%function \n\t"
"arm_func: \n\t"
"adds r0, r0, r1 \n\t"
- "bx lr \n\t"
+ "mov pc, lr \n\t"
".code "NORMAL_ISA /* Back to Thumb if necessary */
: : : "r0", "r1", "cc"
);
#else /* CONFIG_THUMB2_KERNEL */
+#define RET(reg) "bx "#reg
+
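/*
 * Editorial note, not from the patch: the Thumb-2 build keeps BX because
 * return addresses here can carry the Thumb bit (bit 0) and the return
 * must be able to land in Thumb state; every CPU capable of running a
 * Thumb-2 kernel implements BX, so the ARMv4 concern does not apply.
 */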
long thumb16_func(long r0, long r1);
long thumb32even_func(long r0, long r1);
long thumb32odd_func(long r0, long r1);
{
__asm__ __volatile__ (
"nop \n\t"
- "bx lr"
+ RET(lr)" \n\t"
);
}
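/*
 * Editorial note on the RET(lr) line above, not part of the patch: RET()
 * works by stringizing its argument (#reg) and letting the compiler
 * concatenate adjacent string literals, so the same source line becomes
 *
 *   RET(lr)" \n\t"  ->  "mov pc, " "lr" " \n\t"  ->  "mov pc, lr \n\t"  (ARM kernel)
 *   RET(lr)" \n\t"  ->  "bx "      "lr" " \n\t"  ->  "bx lr \n\t"       (Thumb-2 kernel)
 *
 * depending on which definition of RET() is in effect.
 */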
"bic r0, lr, #1 @ r0 = inline data \n\t"
"mov r1, sp \n\t"
"bl kprobes_test_case_start \n\t"
- "bx r0 \n\t"
+ RET(r0)" \n\t"
);
}
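/*
 * Editorial note, not from the patch: in the trampoline above, lr holds
 * the caller's return address with bit 0 set when the caller was in Thumb
 * state; "bic r0, lr, #1" strips that bit so r0 points at the inline test
 * data placed after the call site, r1 passes the stack pointer, and the
 * kprobes_test_case_start() helper returns (in r0) the address at which
 * execution should resume. RET(r0) then branches there using whichever
 * return instruction the build's oldest supported CPU provides.
 */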