s390: Use inline qualifier for all EX_TABLE and ALTERNATIVE inline assemblies

Use asm_inline for all inline assemblies which make use of the EX_TABLE or
ALTERNATIVE macros.

These macros expand to many lines, and the compiler assumes that the number
of lines within an inline assembly equals the number of instructions it
contains. The resulting size overestimate affects inlining and loop
unrolling decisions.
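
To see why, consider what a macro like EX_TABLE expands to: almost
exclusively section directives and table data, not instructions. The
following is a simplified sketch for illustration only; the actual s390
definition (see arch/s390/include/asm/asm-extable.h) is more involved and
carries additional information:

  /* Simplified sketch, not the real s390 macro. */
  #define EX_TABLE(_fault, _target)		\
	".section __ex_table,\"a\"\n"		\
	".balign 4\n"				\
	".long (" #_fault ") - .\n"		\
	".long (" #_target ") - .\n"		\
	".previous\n"

This expansion adds five lines to an inline assembly without adding a
single instruction, so a line-based size estimate is off by a wide margin.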

To avoid these incorrect size estimates use asm_inline, which tells the
compiler to assume that an inline assembly has the smallest possible size.
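
For reference, asm_inline is defined in include/linux/compiler_types.h and
falls back to a plain asm statement on compilers without support for the
asm inline extension:

  #ifdef CONFIG_CC_HAS_ASM_INLINE
  #define asm_inline asm __inline
  #else
  #define asm_inline asm
  #endif

With asm inline the compiler bases its optimization heuristics on the
minimum possible size of the statement instead of counting lines.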

Since a couple of the affected inline assemblies are quite large, it may
not be obvious when asm_inline should be used. To avoid confusion, the rule
is simple: always use asm_inline whenever the EX_TABLE or ALTERNATIVE macro
is used. In specific cases there may be reasons not to follow this
guideline, but those should be documented next to the corresponding code.

Using the inline qualifier everywhere has only a small effect on the kernel
image size, as the bloat-o-meter output shows:

add/remove: 0/10 grow/shrink: 19/8 up/down: 1492/-1858 (-366)

The only location where this seems to matter is load_unaligned_zeropad()
from word-at-a-time.h, where the compiler now inlines more functions within
the dcache code, which is indeed code where performance matters.
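
For context, here is a minimal sketch of the word-at-a-time pattern that
benefits from this. It is loosely modeled on the path component hashing in
fs/namei.c; waat_strlen() is a hypothetical helper written for
illustration, not actual kernel code:

  #include <linux/types.h>
  #include <asm/word-at-a-time.h>

  /*
   * Hypothetical helper, for illustration only: determine the length
   * of a NUL-terminated string one word at a time.
   * load_unaligned_zeropad() may read past the end of the string; if
   * that read faults on the following page, the EX_TABLE_ZEROPAD
   * fixup zero-pads the inaccessible bytes instead of crashing.
   */
  static inline size_t waat_strlen(const char *s)
  {
	const struct word_at_a_time constants = WORD_AT_A_TIME_CONSTANTS;
	const char *p = s;
	unsigned long val, data;

	for (;;) {
		val = load_unaligned_zeropad(p);
		if (has_zero(val, &data, &constants)) {
			data = prep_zero_mask(val, data, &constants);
			data = create_zero_mask(data);
			return p - s + find_zero(data);
		}
		p += sizeof(unsigned long);
	}
  }

With asm_inline, the size estimate for the EX_TABLE_ZEROPAD expansion no
longer deters the compiler from inlining such helpers into their callers.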

Suggested-by: Juergen Christ <jchrist@linux.ibm.com>
Reviewed-by: Juergen Christ <jchrist@linux.ibm.com>
Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
Signed-off-by: Vasily Gorbik <gor@linux.ibm.com>
Author:    Heiko Carstens <hca@linux.ibm.com>
Date:      2025-03-17 16:22:35 +01:00
Committer: Vasily Gorbik <gor@linux.ibm.com>
Parent:    caa3cd5ccd
Commit:    0dafe9968a

20 changed files with 48 additions and 44 deletions

@@ -171,7 +171,7 @@ static inline int qctri(struct cpumf_ctr_info *info)
 {
 	int rc = -EINVAL;
 
-	asm volatile (
+	asm_inline volatile (
 		"0:	qctri	%1\n"
 		"1:	lhi	%0,0\n"
 		"2:\n"
@@ -185,7 +185,7 @@ static inline int lcctl(u64 ctl)
 {
 	int cc;
 
-	asm volatile (
+	asm_inline volatile (
 		"	lcctl	%[ctl]\n"
 		CC_IPM(cc)
 		: CC_OUT(cc, cc)
@@ -200,7 +200,7 @@ static inline int __ecctr(u64 ctr, u64 *content)
 	u64 _content;
 	int cc;
 
-	asm volatile (
+	asm_inline volatile (
 		"	ecctr	%[_content],%[ctr]\n"
 		CC_IPM(cc)
 		: CC_OUT(cc, cc), [_content] "=d" (_content)

@@ -66,7 +66,7 @@ static inline void diag10_range(unsigned long start_pfn, unsigned long num_pfn)
 	end_addr = pfn_to_phys(start_pfn + num_pfn - 1);
 
 	diag_stat_inc(DIAG_STAT_X010);
-	asm volatile(
+	asm_inline volatile(
 		"0:	diag	%0,%1,0x10\n"
 		"1:	nopr	%%r7\n"
 		EX_TABLE(0b, 1b)

@@ -416,7 +416,11 @@ static __always_inline bool regs_irqs_disabled(struct pt_regs *regs)
 
 static __always_inline void bpon(void)
 {
-	asm volatile(ALTERNATIVE("nop", ".insn rrf,0xb2e80000,0,0,13,0", ALT_SPEC(82)));
+	asm_inline volatile(
+		ALTERNATIVE("	nop\n",
+			    "	.insn	rrf,0xb2e80000,0,0,13,0\n",
+			    ALT_SPEC(82))
+	);
 }
 
 #endif /* __ASSEMBLY__ */

@@ -147,7 +147,7 @@ __put_user_##type##_noinstr(unsigned type __user *to,		\
 {									\
 	int rc;								\
 									\
-	asm volatile(							\
+	asm_inline volatile(						\
 		"	llilh	%%r0,%[spec]\n"				\
 		"0:	mvcos	%[to],%[from],%[size]\n"		\
 		"1:	lhi	%[rc],0\n"				\
@@ -263,7 +263,7 @@ __get_user_##type##_noinstr(unsigned type *to,			\
 {									\
 	int rc;								\
 									\
-	asm volatile(							\
+	asm_inline volatile(						\
 		"	lhi	%%r0,%[spec]\n"				\
 		"0:	mvcos	%[to],%[from],%[size]\n"		\
 		"1:	lhi	%[rc],0\n"				\
@@ -490,7 +490,7 @@ static __always_inline int __cmpxchg_user_key(unsigned long address, void *uval,
 		_old = ((unsigned int)old & 0xff) << shift;
 		_new = ((unsigned int)new & 0xff) << shift;
 		mask = ~(0xff << shift);
-		asm volatile(
+		asm_inline volatile(
 			"	spka	0(%[key])\n"
 			"	sacf	256\n"
 			"	llill	%[count],%[max_loops]\n"
@@ -538,7 +538,7 @@ static __always_inline int __cmpxchg_user_key(unsigned long address, void *uval,
 		_old = ((unsigned int)old & 0xffff) << shift;
 		_new = ((unsigned int)new & 0xffff) << shift;
 		mask = ~(0xffff << shift);
-		asm volatile(
+		asm_inline volatile(
 			"	spka	0(%[key])\n"
 			"	sacf	256\n"
 			"	llill	%[count],%[max_loops]\n"
@@ -580,7 +580,7 @@ static __always_inline int __cmpxchg_user_key(unsigned long address, void *uval,
 	case 4: {
 		unsigned int prev = old;
 
-		asm volatile(
+		asm_inline volatile(
 			"	spka	0(%[key])\n"
 			"	sacf	256\n"
 			"0:	cs	%[prev],%[new],%[address]\n"
@@ -601,7 +601,7 @@ static __always_inline int __cmpxchg_user_key(unsigned long address, void *uval,
 	case 8: {
 		unsigned long prev = old;
 
-		asm volatile(
+		asm_inline volatile(
 			"	spka	0(%[key])\n"
 			"	sacf	256\n"
 			"0:	csg	%[prev],%[new],%[address]\n"
@@ -622,7 +622,7 @@ static __always_inline int __cmpxchg_user_key(unsigned long address, void *uval,
 	case 16: {
 		__uint128_t prev = old;
 
-		asm volatile(
+		asm_inline volatile(
 			"	spka	0(%[key])\n"
 			"	sacf	256\n"
 			"0:	cdsg	%[prev],%[new],%[address]\n"

@@ -52,7 +52,7 @@ static inline unsigned long load_unaligned_zeropad(const void *addr)
 {
 	unsigned long data;
 
-	asm volatile(
+	asm_inline volatile(
 		"0:	lg	%[data],0(%[addr])\n"
 		"1:	nopr	%%r7\n"
 		EX_TABLE_ZEROPAD(0b, 1b, %[data], %[addr])