From: H. Peter Anvin <hpa@zytor.com>
Date: Wed, 30 Jan 2008 12:30:30 +0000 (+0100)
Subject: x86: prepare merger of <asm/alternative_{32,64}.h>
X-Git-Url: https://git.stricted.de/?a=commitdiff_plain;h=1d8a1f6b51f6b195dfdcf05821be97edede5664a;p=GitHub%2FLineageOS%2Fandroid_kernel_motorola_exynos9610.git

x86: prepare merger of <asm/alternative_{32,64}.h>

Prepare for merging <asm/alternative_{32,64}.h> by making the 32- and
64-bit versions textually identical.  This involves:

- removing arbitrary header inclusion differences
- reorganizing the 32-bit version slightly to match the 64-bit version
- using <asm/asm.h> to unify the assembly code
- renaming struct paravirt_patch to struct paravirt_patch_site in the
  64-bit version to match the 32-bit version; there are no references
  to struct paravirt_patch elsewhere in the tree.

Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
---
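
Note: the unification below relies on the _ASM_ALIGN and _ASM_PTR macros
from <asm/asm.h>, which select pointer-sized assembler directives per
configuration.  A minimal sketch of what they are expected to expand to
(for orientation only; that header is not touched by this patch and its
exact contents may differ):

	#ifdef CONFIG_X86_32
	# define _ASM_ALIGN	" .balign 4 "	/* align to 32-bit pointer size */
	# define _ASM_PTR	" .long "	/* emit a 32-bit pointer */
	#else
	# define _ASM_ALIGN	" .balign 8 "	/* align to 64-bit pointer size */
	# define _ASM_PTR	" .quad "	/* emit a 64-bit pointer */
	#endif

With these, the previously divergent ".align 4/.long" (32-bit) and
".align 8/.quad" (64-bit) fragments collapse into a single spelling.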
diff --git a/include/asm-x86/alternative_32.h b/include/asm-x86/alternative_32.h
index bda6c810c0f4..4919b5ee8b9f 100644
--- a/include/asm-x86/alternative_32.h
+++ b/include/asm-x86/alternative_32.h
@@ -1,23 +1,63 @@
 #ifndef _I386_ALTERNATIVE_H
 #define _I386_ALTERNATIVE_H
 
-#include <asm/types.h>
-#include <linux/stddef.h>
 #include <linux/types.h>
+#include <linux/stddef.h>
+#include <asm/asm.h>
+
+/*
+ * Alternative inline assembly for SMP.
+ *
+ * The LOCK_PREFIX macro defined here replaces the LOCK and
+ * LOCK_PREFIX macros used everywhere in the source tree.
+ *
+ * SMP alternatives use the same data structures as the other
+ * alternatives and the X86_FEATURE_UP flag to indicate the case of a
+ * UP system running a SMP kernel.  The existing apply_alternatives()
+ * works fine for patching a SMP kernel for UP.
+ *
+ * The SMP alternative tables can be kept after boot and contain both
+ * UP and SMP versions of the instructions to allow switching back to
+ * SMP at runtime, when hotplugging in a new CPU, which is especially
+ * useful in virtualized environments.
+ *
+ * The very common lock prefix is handled as special case in a
+ * separate table which is a pure address list without replacement ptr
+ * and size information.  That keeps the table sizes small.
+ */
+
+#ifdef CONFIG_SMP
+#define LOCK_PREFIX \
+		".section .smp_locks,\"a\"\n"	\
+		_ASM_ALIGN "\n"			\
+		_ASM_PTR "661f\n" /* address */	\
+		".previous\n"			\
+		"661:\n\tlock; "
+
+#else /* ! CONFIG_SMP */
+#define LOCK_PREFIX ""
+#endif
+
+/* This must be included *after* the definition of LOCK_PREFIX */
+#include <asm/cpufeature.h>
 
 struct alt_instr {
-	u8 *instr; 		/* original instruction */
+	u8 *instr;		/* original instruction */
 	u8 *replacement;
 	u8  cpuid;		/* cpuid bit set for replacement */
 	u8  instrlen;		/* length of original instruction */
-	u8  replacementlen; 	/* length of new instruction, <= instrlen */
-	u8  pad;
+	u8  replacementlen;	/* length of new instruction, <= instrlen */
+	u8  pad1;
+#ifdef CONFIG_X86_64
+	u32 pad2;
+#endif
 };
 
 extern void alternative_instructions(void);
 extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
 
 struct module;
+
 #ifdef CONFIG_SMP
 extern void alternatives_smp_module_add(struct module *mod, char *name,
 					void *locks, void *locks_end,
@@ -45,17 +85,17 @@ static inline void alternatives_smp_switch(int smp) {}
  * without volatile and memory clobber.
  */
 #define alternative(oldinstr, newinstr, feature)			\
-	asm volatile ("661:\n\t" oldinstr "\n662:\n" 			\
+	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      "  .align 4\n"					\
-		      "  .long 661b\n"		/* label */		\
-		      "  .long 663f\n"		/* new instruction */	\
-		      "  .byte %c0\n"		/* feature bit */	\
-		      "  .byte 662b-661b\n"	/* sourcelen */		\
-		      "  .byte 664f-663f\n"	/* replacementlen */	\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n"		/* label */		\
+		      _ASM_PTR "663f\n"		/* new instruction */	\
+		      " .byte %c0\n"		/* feature bit */	\
+		      " .byte 662b-661b\n"	/* sourcelen */		\
+		      " .byte 664f-663f\n"	/* replacementlen */	\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"		\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */\
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
 		      ".previous" :: "i" (feature) : "memory")
 
 /*
@@ -66,35 +106,35 @@ static inline void alternatives_smp_switch(int smp) {}
  * Argument numbers start with 1.
  * Best is to use constraints that are fixed size (like (%1) ... "r")
  * If you use variable sized constraints like "m" or "g" in the
- * replacement maake sure to pad to the worst case length.
+ * replacement make sure to pad to the worst case length.
  */
 #define alternative_input(oldinstr, newinstr, feature, input...)	\
 	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      "  .align 4\n"					\
-		      "  .long 661b\n"		/* label */		\
-		      "  .long 663f\n"		/* new instruction */	\
-		      "  .byte %c0\n"		/* feature bit */	\
-		      "  .byte 662b-661b\n"	/* sourcelen */		\
-		      "  .byte 664f-663f\n"	/* replacementlen */	\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n"		/* label */		\
+		      _ASM_PTR "663f\n"		/* new instruction */	\
+		      " .byte %c0\n"		/* feature bit */	\
+		      " .byte 662b-661b\n"	/* sourcelen */		\
+		      " .byte 664f-663f\n"	/* replacementlen */	\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"		\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */\
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
 		      ".previous" :: "i" (feature), ##input)
 
 /* Like alternative_input, but with a single output argument */
-#define alternative_io(oldinstr, newinstr, feature, output, input...) \
+#define alternative_io(oldinstr, newinstr, feature, output, input...)	\
 	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      "  .align 4\n"					\
-		      "  .long 661b\n"		/* label */		\
-		      "  .long 663f\n"		/* new instruction */	\
-		      "  .byte %c[feat]\n"	/* feature bit */	\
-		      "  .byte 662b-661b\n"	/* sourcelen */		\
-		      "  .byte 664f-663f\n"	/* replacementlen */	\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n"		/* label */		\
+		      _ASM_PTR "663f\n"		/* new instruction */	\
+		      " .byte %c[feat]\n"	/* feature bit */	\
+		      " .byte 662b-661b\n"	/* sourcelen */		\
+		      " .byte 664f-663f\n"	/* replacementlen */	\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"		\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */	\
 		      ".previous" : output : [feat] "i" (feature), ##input)
 
 /*
@@ -103,39 +143,6 @@ static inline void alternatives_smp_switch(int smp) {}
  */
 #define ASM_OUTPUT2(a, b) a, b
 
-/*
- * Alternative inline assembly for SMP.
- *
- * The LOCK_PREFIX macro defined here replaces the LOCK and
- * LOCK_PREFIX macros used everywhere in the source tree.
- *
- * SMP alternatives use the same data structures as the other
- * alternatives and the X86_FEATURE_UP flag to indicate the case of a
- * UP system running a SMP kernel.  The existing apply_alternatives()
- * works fine for patching a SMP kernel for UP.
- *
- * The SMP alternative tables can be kept after boot and contain both
- * UP and SMP versions of the instructions to allow switching back to
- * SMP at runtime, when hotplugging in a new CPU, which is especially
- * useful in virtualized environments.
- *
- * The very common lock prefix is handled as special case in a
- * separate table which is a pure address list without replacement ptr
- * and size information.  That keeps the table sizes small.
- */
-
-#ifdef CONFIG_SMP
-#define LOCK_PREFIX \
-		".section .smp_locks,\"a\"\n"	\
-		"  .align 4\n"			\
-		"  .long 661f\n" /* address */	\
-		".previous\n"			\
-		"661:\n\tlock; "
-
-#else /* ! CONFIG_SMP */
-#define LOCK_PREFIX ""
-#endif
-
 struct paravirt_patch_site;
 #ifdef CONFIG_PARAVIRT
 void apply_paravirt(struct paravirt_patch_site *start,
diff --git a/include/asm-x86/alternative_64.h b/include/asm-x86/alternative_64.h
index ab161e810151..50efcebae33f 100644
--- a/include/asm-x86/alternative_64.h
+++ b/include/asm-x86/alternative_64.h
@@ -1,10 +1,9 @@
 #ifndef _X86_64_ALTERNATIVE_H
 #define _X86_64_ALTERNATIVE_H
 
-#ifdef __KERNEL__
-
 #include <linux/types.h>
 #include <linux/stddef.h>
+#include <asm/asm.h>
 
 /*
  * Alternative inline assembly for SMP.
@@ -30,10 +29,10 @@
 #ifdef CONFIG_SMP
 #define LOCK_PREFIX \
 		".section .smp_locks,\"a\"\n"	\
-		"  .align 8\n"			\
-		"  .quad 661f\n" /* address */	\
+		_ASM_ALIGN "\n"			\
+		_ASM_PTR "661f\n" /* address */	\
 		".previous\n"			\
-		"661:\n\tlock; " 
+		"661:\n\tlock; "
 
 #else /* ! CONFIG_SMP */
 #define LOCK_PREFIX ""
@@ -43,12 +42,15 @@
 #include <asm/cpufeature.h>
 
 struct alt_instr {
-	u8 *instr; 		/* original instruction */
+	u8 *instr;		/* original instruction */
 	u8 *replacement;
 	u8  cpuid;		/* cpuid bit set for replacement */
 	u8  instrlen;		/* length of original instruction */
-	u8  replacementlen; 	/* length of new instruction, <= instrlen */
-	u8  pad[5];
+	u8  replacementlen;	/* length of new instruction, <= instrlen */
+	u8  pad1;
+#ifdef CONFIG_X86_64
+	u32 pad2;
+#endif
 };
 
 extern void alternative_instructions(void);
@@ -68,9 +70,7 @@ static inline void alternatives_smp_module_add(struct module *mod, char *name,
 					void *text, void *text_end) {}
 static inline void alternatives_smp_module_del(struct module *mod) {}
 static inline void alternatives_smp_switch(int smp) {}
-#endif
-
-#endif
+#endif /* CONFIG_SMP */
 
 /*
  * Alternative instructions for different CPU types or capabilities.
@@ -84,18 +84,18 @@ static inline void alternatives_smp_switch(int smp) {}
  * For non barrier like inlines please define new variants
 * without volatile and memory clobber.
  */
-#define alternative(oldinstr, newinstr, feature) \
-	asm volatile ("661:\n\t" oldinstr "\n662:\n" \
-	    ".section .altinstructions,\"a\"\n" \
-	    "  .align 8\n" \
-	    "  .quad 661b\n" /* label */ \
-	    "  .quad 663f\n" /* new instruction */ \
-	    "  .byte %c0\n" /* feature bit */ \
-	    "  .byte 662b-661b\n" /* sourcelen */ \
-	    "  .byte 664f-663f\n" /* replacementlen */ \
+#define alternative(oldinstr, newinstr, feature)			\
+	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
+		      ".section .altinstructions,\"a\"\n"		\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n"		/* label */		\
+		      _ASM_PTR "663f\n"		/* new instruction */	\
+		      " .byte %c0\n"		/* feature bit */	\
+		      " .byte 662b-661b\n"	/* sourcelen */		\
+		      " .byte 664f-663f\n"	/* replacementlen */	\
 	    ".previous\n" \
 	    ".section .altinstr_replacement,\"ax\"\n" \
-	    "663:\n\t" newinstr "\n664:\n" /* replacement */ \
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */	\
 	    ".previous" :: "i" (feature) : "memory")
 
 /*
@@ -111,30 +111,30 @@ static inline void alternatives_smp_switch(int smp) {}
 #define alternative_input(oldinstr, newinstr, feature, input...)	\
 	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      "  .align 8\n"					\
-		      "  .quad 661b\n"		/* label */		\
-		      "  .quad 663f\n"		/* new instruction */	\
-		      "  .byte %c0\n"		/* feature bit */	\
-		      "  .byte 662b-661b\n"	/* sourcelen */		\
-		      "  .byte 664f-663f\n"	/* replacementlen */	\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n"		/* label */		\
+		      _ASM_PTR "663f\n"		/* new instruction */	\
+		      " .byte %c0\n"		/* feature bit */	\
+		      " .byte 662b-661b\n"	/* sourcelen */		\
+		      " .byte 664f-663f\n"	/* replacementlen */	\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"		\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */	\
 		      ".previous" :: "i" (feature), ##input)
 
 /* Like alternative_input, but with a single output argument */
-#define alternative_io(oldinstr, newinstr, feature, output, input...) \
+#define alternative_io(oldinstr, newinstr, feature, output, input...)	\
 	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
 		      ".section .altinstructions,\"a\"\n"		\
-		      "  .align 8\n"					\
-		      "  .quad 661b\n"		/* label */		\
-		      "  .quad 663f\n"		/* new instruction */	\
-		      "  .byte %c[feat]\n"	/* feature bit */	\
-		      "  .byte 662b-661b\n"	/* sourcelen */		\
-		      "  .byte 664f-663f\n"	/* replacementlen */	\
+		      _ASM_ALIGN "\n"					\
+		      _ASM_PTR "661b\n"		/* label */		\
+		      _ASM_PTR "663f\n"		/* new instruction */	\
+		      " .byte %c[feat]\n"	/* feature bit */	\
+		      " .byte 662b-661b\n"	/* sourcelen */		\
+		      " .byte 664f-663f\n"	/* replacementlen */	\
 		      ".previous\n"					\
 		      ".section .altinstr_replacement,\"ax\"\n"		\
-		      "663:\n\t" newinstr "\n664:\n" /* replacement */ \
+		      "663:\n\t" newinstr "\n664:\n" /* replacement */	\
 		      ".previous" : output : [feat] "i" (feature), ##input)
 
 /*
@@ -143,15 +143,17 @@ static inline void alternatives_smp_switch(int smp) {}
  */
 #define ASM_OUTPUT2(a, b) a, b
 
-struct paravirt_patch;
+struct paravirt_patch_site;
 #ifdef CONFIG_PARAVIRT
-void apply_paravirt(struct paravirt_patch *start, struct paravirt_patch *end);
+void apply_paravirt(struct paravirt_patch_site *start,
+		    struct paravirt_patch_site *end);
 #else
 static inline void
-apply_paravirt(struct paravirt_patch *start, struct paravirt_patch *end)
+apply_paravirt(struct paravirt_patch_site *start,
+	       struct paravirt_patch_site *end)
 {}
-#define __parainstructions NULL
-#define __parainstructions_end NULL
+#define __parainstructions	NULL
+#define __parainstructions_end	NULL
 #endif
 
 extern void text_poke(void *addr, unsigned char *opcode, int len);
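
Usage sketch (not part of this patch; the example_* names are illustrative
and the feature bit is just an example): LOCK_PREFIX is consumed by
locked-instruction wrappers, recording the address of each lock byte in
.smp_locks so an SMP kernel can be repatched for UP at runtime, while
alternative() records a patch site in .altinstructions for
apply_alternatives() to rewrite on CPUs that have the feature bit set:

	/* The lock prefix is noted in .smp_locks and can be NOPed out on UP. */
	static inline void example_atomic_inc(int *counter)
	{
		asm volatile(LOCK_PREFIX "incl %0" : "+m" (*counter));
	}

	/*
	 * 32-bit flavor: use SFENCE where X86_FEATURE_XMM is set; fall back
	 * to a locked no-op add on the stack as the store barrier otherwise.
	 */
	#define example_wmb() \
		alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)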