diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 094fbc9c0b1c..13adca37c99a 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -201,10 +201,10 @@ static inline int alternatives_text_reserved(void *start, void *end)
  * without volatile and memory clobber.
  */
 #define alternative(oldinstr, newinstr, feature)			\
-	asm volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory")
+	asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory")
 
 #define alternative_2(oldinstr, newinstr1, feature1, newinstr2, feature2) \
-	asm volatile(ALTERNATIVE_2(oldinstr, newinstr1, feature1, newinstr2, feature2) ::: "memory")
+	asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, feature1, newinstr2, feature2) ::: "memory")
 
 /*
  * Alternative inline assembly with input.
@@ -218,7 +218,7 @@ static inline int alternatives_text_reserved(void *start, void *end)
  * Leaving an unused argument 0 to keep API compatibility.
  */
 #define alternative_input(oldinstr, newinstr, feature, input...)	\
-	asm volatile (ALTERNATIVE(oldinstr, newinstr, feature)		\
+	asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature)	\
 		: : "i" (0), ## input)
 
 /*
@@ -231,18 +231,18 @@ static inline int alternatives_text_reserved(void *start, void *end)
  */
 #define alternative_input_2(oldinstr, newinstr1, feature1, newinstr2,	     \
 			   feature2, input...)				     \
-	asm volatile(ALTERNATIVE_2(oldinstr, newinstr1, feature1,	     \
+	asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, feature1,    \
 		newinstr2, feature2)					     \
 		: : "i" (0), ## input)
 
 /* Like alternative_input, but with a single output argument */
 #define alternative_io(oldinstr, newinstr, feature, output, input...)	\
-	asm volatile (ALTERNATIVE(oldinstr, newinstr, feature)		\
+	asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature)	\
 		: output : "i" (0), ## input)
 
 /* Like alternative_io, but for replacing a direct call with another one. */
 #define alternative_call(oldfunc, newfunc, feature, output, input...)	\
-	asm volatile (ALTERNATIVE("call %P[old]", "call %P[new]", feature) \
+	asm_inline volatile (ALTERNATIVE("call %P[old]", "call %P[new]", feature) \
 		: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
 
 /*
@@ -253,7 +253,7 @@ static inline int alternatives_text_reserved(void *start, void *end)
  */
 #define alternative_call_2(oldfunc, newfunc1, feature1, newfunc2, feature2,   \
 			   output, input...)				      \
-	asm volatile (ALTERNATIVE_2("call %P[old]", "call %P[new1]", feature1,\
+	asm_inline volatile (ALTERNATIVE_2("call %P[old]", "call %P[new1]", feature1,\
 		"call %P[new2]", feature2)				      \
 		: output, ASM_CALL_CONSTRAINT				      \
 		: [old] "i" (oldfunc), [new1] "i" (newfunc1),		      \
diff --git a/arch/x86/include/asm/bug.h b/arch/x86/include/asm/bug.h
index 6804d6642767..facba9bc30ca 100644
--- a/arch/x86/include/asm/bug.h
+++ b/arch/x86/include/asm/bug.h
@@ -32,7 +32,7 @@
 
 #define _BUG_FLAGS(ins, flags)						\
 do {									\
-	asm volatile("1:\t" ins "\n"					\
+	asm_inline volatile("1:\t" ins "\n"				\
 		     ".pushsection __bug_table,\"aw\"\n"		\
 		     "2:\t" __BUG_REL(1b) "\t# bug_entry::bug_addr\n"	\
 		     "\t" __BUG_REL(%c0) "\t# bug_entry::file\n"	\
@@ -49,7 +49,7 @@ do {									\
 
 #define _BUG_FLAGS(ins, flags)						\
 do {									\
-	asm volatile("1:\t" ins "\n"					\
+	asm_inline volatile("1:\t" ins "\n"				\
 		     ".pushsection __bug_table,\"aw\"\n"		\
 		     "2:\t" __BUG_REL(1b) "\t# bug_entry::bug_addr\n"	\
 		     "\t.word %c0"	"\t# bug_entry::flags\n"	\
diff --git a/include/linux/compiler_types.h b/include/linux/compiler_types.h
index b056a40116da..72393a8c1a6c 100644
--- a/include/linux/compiler_types.h
+++ b/include/linux/compiler_types.h
@@ -146,8 +146,17 @@ struct ftrace_likely_data {
 	__inline_maybe_unused notrace
 #endif
 
+/*
+ * gcc provides both __inline__ and __inline as alternate spellings of
+ * the inline keyword, though the latter is undocumented. New kernel
+ * code should only use the inline spelling, but some existing code
+ * uses __inline__. Since we #define inline above, to ensure
+ * __inline__ has the same semantics, we need this #define.
+ *
+ * However, the spelling __inline is strictly reserved for referring
+ * to the bare keyword.
+ */
 #define __inline__ inline
-#define __inline inline
 
 /*
  * GCC does not warn about unused static inline functions for -Wunused-function.
@@ -197,6 +206,12 @@ struct ftrace_likely_data {
 #define asm_volatile_goto(x...) asm goto(x)
 #endif
 
+#ifdef CONFIG_CC_HAS_ASM_INLINE
+#define asm_inline asm __inline
+#else
+#define asm_inline asm
+#endif
+
 #ifndef __no_fgcse
 # define __no_fgcse
 #endif
diff --git a/init/Kconfig b/init/Kconfig
index f4534c58342d..7020238fd263 100644
--- a/init/Kconfig
+++ b/init/Kconfig
@@ -33,6 +33,9 @@ config CC_HAS_ASM_GOTO
 config TOOLS_SUPPORT_RELR
 	def_bool $(success,env "CC=$(CC)" "LD=$(LD)" "NM=$(NM)" "OBJCOPY=$(OBJCOPY)" $(srctree)/scripts/tools-support-relr.sh)
 
+config CC_HAS_ASM_INLINE
+	def_bool $(success,echo 'void foo(void) { asm inline (""); }' | $(CC) -x c - -c -o /dev/null)
+
 config CC_HAS_WARN_MAYBE_UNINITIALIZED
 	def_bool $(cc-option,-Wmaybe-uninitialized)
 	help
diff --git a/lib/zstd/mem.h b/lib/zstd/mem.h
index 3a0f34c8706c..93d7a2c377fe 100644
--- a/lib/zstd/mem.h
+++ b/lib/zstd/mem.h
@@ -27,7 +27,7 @@
 /*-****************************************
 *  Compiler specifics
 ******************************************/
-#define ZSTD_STATIC static __inline __attribute__((unused))
+#define ZSTD_STATIC static inline
 
 /*-**************************************************************
 *  Basic Types
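Note (not part of the patch): a minimal sketch of how a caller is expected to use the new macro, assuming CONFIG_CC_HAS_ASM_INLINE is selected by the Kconfig probe above. The helper name and asm body below are illustrative only; the point is that asm_inline expands to "asm __inline" (i.e. "asm inline") on compilers that support it, so the optimizer treats the statement as minimum size for its inlining heuristics instead of estimating cost from the length of the string literal, and it falls back to plain asm on older toolchains.

	#include <linux/compiler.h>	/* pulls in compiler_types.h, which defines asm_inline */

	/* Hypothetical helper, not part of the patch. */
	static inline void example_relax(void)
	{
		/*
		 * Expands to asm __inline volatile(...) when the compiler
		 * understands "asm inline", and to asm volatile(...)
		 * otherwise, so the same source builds either way.
		 */
		asm_inline volatile("nop" ::: "memory");
	}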