/*
* Exception vectors.
*/
+ .pushsection ".entry.text", "ax"
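+ // Code between here and the matching .popsection goes into .entry.text,
+ // which the linker bounds with __entry_text_start/__entry_text_end.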
.align 11
ENTRY(vectors)

[ ... vector table and entry handlers elided ... ]

bl do_ni_syscall
b __sys_trace_return
+ .popsection // .entry.text
+
/*
* Special system call wrappers.
*/
#include <asm/insn.h>
#include <asm/uaccess.h>
#include <asm/irq.h>
+#include <asm-generic/sections.h>
#include "decode-insn.h"
return 1;
}
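+
+/*
+ * Hook called by the generic kprobes core (within_kprobe_blacklist()) so
+ * that the architecture can reject probe addresses it cannot handle.
+ */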
+bool arch_within_kprobe_blacklist(unsigned long addr)
+{
+ extern char __idmap_text_start[], __idmap_text_end[];
+ extern char __hyp_text_start[], __hyp_text_end[];
+ extern char __hyp_idmap_text_start[], __hyp_idmap_text_end[];
+
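+ /*
+  * Never probe the kprobes, exception-entry or identity-map sections,
+  * nor any instruction covered by an exception-table fixup.
+  */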
+ if ((addr >= (unsigned long)__kprobes_text_start &&
+ addr < (unsigned long)__kprobes_text_end) ||
+ (addr >= (unsigned long)__entry_text_start &&
+ addr < (unsigned long)__entry_text_end) ||
+ (addr >= (unsigned long)__idmap_text_start &&
+ addr < (unsigned long)__idmap_text_end) ||
+ !!search_exception_tables(addr))
+ return true;
+
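+ /*
+  * Without VHE, the hypervisor text and its idmap run at EL2, out of
+  * reach of the EL1 breakpoint/single-step machinery kprobes relies on.
+  */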
+ if (!is_kernel_in_hyp_mode()) {
+ if ((addr >= (unsigned long)__hyp_text_start &&
+ addr < (unsigned long)__hyp_text_end) ||
+ (addr >= (unsigned long)__hyp_idmap_text_start &&
+ addr < (unsigned long)__hyp_idmap_text_end))
+ return true;
+ }
+
+ return false;
+}
+
int __init arch_init_kprobes(void)
{
return 0;