2024-05-31 12:35:49

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 00/14] x86/alternatives: Nest them

From: "Borislav Petkov (AMD)" <[email protected]>

Hi,

this is basically peterz's idea to nest the alternative macros with
a fix to an issue I encountered while testing.

For ease of bisection, the old macros are converted to the new, nested
variants in a step-by-step manner so that in case an issue is
encountered during testing, one can pinpoint the place where it fails
more easily. Because debugging alternatives is a serious pain.

Testing here on my farm looks good but who knows what happens out there,
in the wild.

Thx.

Borislav Petkov (AMD) (13):
x86/alternative: Zap alternative_ternary()
x86/alternative: Convert alternative()
x86/alternative: Convert alternative_2()
x86/alternative: Convert alternative_input()
x86/alternative: Convert alternative_io()
x86/alternative: Convert alternative_call()
x86/alternative: Convert alternative_call_2()
x86/alternative: Convert ALTERNATIVE_TERNARY()
x86/alternative: Convert ALTERNATIVE_3()
x86/alternative: Convert the asm ALTERNATIVE() macro
x86/alternative: Convert the asm ALTERNATIVE_2() macro
x86/alternative: Convert the asm ALTERNATIVE_3() macro
x86/alternative: Replace the old macros

Peter Zijlstra (1):
x86/alternatives: Add nested alternatives macros

arch/x86/include/asm/alternative.h | 214 ++++++++---------------------
arch/x86/kernel/alternative.c | 20 ++-
arch/x86/kernel/fpu/xstate.h | 6 +-
tools/objtool/arch/x86/special.c | 23 ++++
tools/objtool/special.c | 16 +--
5 files changed, 113 insertions(+), 166 deletions(-)

--
2.43.0



2024-05-31 12:35:54

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 01/14] x86/alternative: Zap alternative_ternary()

From: "Borislav Petkov (AMD)" <[email protected]>

Unused.

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 3 ---
1 file changed, 3 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index ba99ef75f56c..6db78909180a 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -271,9 +271,6 @@ static inline int alternatives_text_reserved(void *start, void *end)
#define alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")

-#define alternative_ternary(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
- asm_inline volatile(ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) ::: "memory")
-
/*
* Alternative inline assembly with input.
*
--
2.43.0


2024-05-31 12:36:15

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 03/14] x86/alternative: Convert alternative()

From: "Borislav Petkov (AMD)" <[email protected]>

Split conversion deliberately into minimal pieces to ease bisection
because debugging alternatives is a nightmare.

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 8554c2c339ff..c622ec7c4462 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -316,14 +316,11 @@ static inline int alternatives_text_reserved(void *start, void *end)
* without volatile and memory clobber.
*/
#define alternative(oldinstr, newinstr, ft_flags) \
- asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
+ asm_inline volatile(N_ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")

#define alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")

-#define n_alternative(oldinstr, newinstr, ft_flags) \
- asm_inline volatile (N_ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
-
#define n_alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
asm_inline volatile(N_ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")

--
2.43.0


2024-05-31 12:36:27

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 02/14] x86/alternatives: Add nested alternatives macros

From: Peter Zijlstra <[email protected]>

Instead of making increasingly complicated ALTERNATIVE_n()
implementations, use a nested alternative expression.

The only difference between:

ALTERNATIVE_2(oldinst, newinst1, flag1, newinst2, flag2)

and

ALTERNATIVE(ALTERNATIVE(oldinst, newinst1, flag1),
newinst2, flag2)

is that the outer alternative can add additional padding when the inner
alternative is the shorter one, which then results in
alt_instr::instrlen being inconsistent.

However, this is easily remedied since the alt_instr entries will be
consecutive and it is trivial to compute the max(alt_instr::instrlen) at
runtime while patching.

Specifically, after this the ALTERNATIVE_2 macro, after CPP expansion
(and manual layout), looks like this:

.macro ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
740:
740: \oldinstr ;
741: .skip -(((744f-743f)-(741b-740b)) > 0) * ((744f-743f)-(741b-740b)),0x90 ;
742: .pushsection .altinstructions,"a" ;
altinstr_entry 740b,743f,\ft_flags1,742b-740b,744f-743f ;
.popsection ;
.pushsection .altinstr_replacement,"ax" ;
743: \newinstr1 ;
744: .popsection ; ;
741: .skip -(((744f-743f)-(741b-740b)) > 0) * ((744f-743f)-(741b-740b)),0x90 ;
742: .pushsection .altinstructions,"a" ;
altinstr_entry 740b,743f,\ft_flags2,742b-740b,744f-743f ;
.popsection ;
.pushsection .altinstr_replacement,"ax" ;
743: \newinstr2 ;
744: .popsection ;
.endm

The only label that is ambiguous is 740; however, they all reference the
same spot, so that doesn't matter.

NOTE: obviously only @oldinstr may be an alternative; making @newinstr
an alternative would mean patching .altinstr_replacement which very
likely isn't what is intended, also the labels will be confused in that
case.

[ bp: Debug an issue where it would match the wrong two insns and
consider them nested due to the same signed offsets in the
.altinstructions section and use instr_va() to compare the full virtual
addresses instead.

- Use new labels to denote that the new, nested
alternatives are being used when staring at preprocessed output. ]

Signed-off-by: Peter Zijlstra <[email protected]>
Co-developed-by: Borislav Petkov (AMD) <[email protected]>
Signed-off-by: Borislav Petkov (AMD) <[email protected]>
Link: https://lkml.kernel.org/r/[email protected]
---
arch/x86/include/asm/alternative.h | 110 ++++++++++++++++++++++++++++-
arch/x86/kernel/alternative.c | 20 +++++-
tools/objtool/arch/x86/special.c | 23 ++++++
tools/objtool/special.c | 16 ++---
4 files changed, 158 insertions(+), 11 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 6db78909180a..8554c2c339ff 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -161,8 +161,13 @@ static inline int alternatives_text_reserved(void *start, void *end)

#define alt_end_marker "663"
#define alt_slen "662b-661b"
+#define n_alt_slen "772b-771b"
+
#define alt_total_slen alt_end_marker"b-661b"
+#define n_alt_total_slen "773b-771b"
+
#define alt_rlen(num) e_replacement(num)"f-"b_replacement(num)"f"
+#define n_alt_rlen "775f-774f"

#define OLDINSTR(oldinstr, num) \
"# ALT: oldnstr\n" \
@@ -172,6 +177,14 @@ static inline int alternatives_text_reserved(void *start, void *end)
"((" alt_rlen(num) ")-(" alt_slen ")),0x90\n" \
alt_end_marker ":\n"

+#define N_OLDINSTR(oldinstr) \
+ "# N_ALT: oldinstr\n" \
+ "771:\n\t" oldinstr "\n772:\n" \
+ "# N_ALT: padding\n" \
+ ".skip -(((" n_alt_rlen ")-(" n_alt_slen ")) > 0) * " \
+ "((" n_alt_rlen ")-(" n_alt_slen ")),0x90\n" \
+ "773:\n"
+
/*
* gas compatible max based on the idea from:
* http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
@@ -209,10 +222,25 @@ static inline int alternatives_text_reserved(void *start, void *end)
" .byte " alt_total_slen "\n" /* source len */ \
" .byte " alt_rlen(num) "\n" /* replacement len */

+#define N_ALTINSTR_ENTRY(ft_flags) \
+ ".pushsection .altinstructions,\"a\"\n" \
+ " .long 771b - .\n" /* label */ \
+ " .long 774f - .\n" /* new instruction */ \
+ " .4byte " __stringify(ft_flags) "\n" /* feature + flags */ \
+ " .byte " n_alt_total_slen "\n" /* source len */ \
+ " .byte " n_alt_rlen "\n" /* replacement len */ \
+ ".popsection\n"
+
#define ALTINSTR_REPLACEMENT(newinstr, num) /* replacement */ \
"# ALT: replacement " #num "\n" \
b_replacement(num)":\n\t" newinstr "\n" e_replacement(num) ":\n"

+#define N_ALTINSTR_REPLACEMENT(newinstr) /* replacement */ \
+ ".pushsection .altinstr_replacement, \"ax\"\n" \
+ "# N_ALT: replacement\n" \
+ "774:\n\t" newinstr "\n775:\n" \
+ ".popsection\n"
+
/* alternative assembly primitive: */
#define ALTERNATIVE(oldinstr, newinstr, ft_flags) \
OLDINSTR(oldinstr, 1) \
@@ -223,6 +251,12 @@ static inline int alternatives_text_reserved(void *start, void *end)
ALTINSTR_REPLACEMENT(newinstr, 1) \
".popsection\n"

+/* Nested alternatives macro variant */
+#define N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
+ N_OLDINSTR(oldinstr) \
+ N_ALTINSTR_ENTRY(ft_flags) \
+ N_ALTINSTR_REPLACEMENT(newinstr)
+
#define ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
OLDINSTR_2(oldinstr, 1, 2) \
".pushsection .altinstructions,\"a\"\n" \
@@ -234,11 +268,21 @@ static inline int alternatives_text_reserved(void *start, void *end)
ALTINSTR_REPLACEMENT(newinstr2, 2) \
".popsection\n"

+#define N_ALTERNATIVE_2(oldinst, newinst1, flag1, newinst2, flag2) \
+ N_ALTERNATIVE(N_ALTERNATIVE(oldinst, newinst1, flag1), \
+ newinst2, flag2)
+
/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
#define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
newinstr_yes, ft_flags)

+/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
+#define N_ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
+ N_ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
+ newinstr_yes, ft_flags)
+
+
#define ALTERNATIVE_3(oldinsn, newinsn1, ft_flags1, newinsn2, ft_flags2, \
newinsn3, ft_flags3) \
OLDINSTR_3(oldinsn, 1, 2, 3) \
@@ -253,6 +297,12 @@ static inline int alternatives_text_reserved(void *start, void *end)
ALTINSTR_REPLACEMENT(newinsn3, 3) \
".popsection\n"

+
+#define N_ALTERNATIVE_3(oldinst, newinst1, flag1, newinst2, flag2, \
+ newinst3, flag3) \
+ N_ALTERNATIVE(N_ALTERNATIVE_2(oldinst, newinst1, flag1, newinst2, flag2), \
+ newinst3, flag3)
+
/*
* Alternative instructions for different CPU types or capabilities.
*
@@ -271,6 +321,12 @@ static inline int alternatives_text_reserved(void *start, void *end)
#define alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")

+#define n_alternative(oldinstr, newinstr, ft_flags) \
+ asm_inline volatile (N_ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
+
+#define n_alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
+ asm_inline volatile(N_ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")
+
/*
* Alternative inline assembly with input.
*
@@ -288,11 +344,24 @@ static inline int alternatives_text_reserved(void *start, void *end)
asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) \
: output : "i" (0), ## input)

+#define n_alternative_io(oldinstr, newinstr, ft_flags, output, input...) \
+ asm_inline volatile (N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
+ : output : "i" (0), ## input)
+
+
/* Like alternative_io, but for replacing a direct call with another one. */
#define alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
asm_inline volatile (ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags) \
: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)

+#define n_alternative_input(oldinstr, newinstr, ft_flags, input...) \
+ asm_inline volatile (N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
+ : : "i" (0), ## input)
+
+#define n_alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
+ asm_inline volatile (N_ALTERNATIVE("call %P[old]", "call %P[new]", ft_flags) \
+ : output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
+
/*
* Like alternative_call, but there are two features and respective functions.
* If CPU has feature2, function2 is used.
@@ -307,6 +376,15 @@ static inline int alternatives_text_reserved(void *start, void *end)
: [old] "i" (oldfunc), [new1] "i" (newfunc1), \
[new2] "i" (newfunc2), ## input)

+#define n_alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
+ output, input...) \
+ asm_inline volatile (N_ALTERNATIVE_2("call %P[old]", "call %P[new1]", ft_flags1,\
+ "call %P[new2]", ft_flags2) \
+ : output, ASM_CALL_CONSTRAINT \
+ : [old] "i" (oldfunc), [new1] "i" (newfunc1), \
+ [new2] "i" (newfunc2), ## input)
+
+
/*
* use this macro(s) if you need more than one output parameter
* in alternative_io
@@ -403,6 +481,27 @@ void nop_func(void);
.popsection
.endm

+#define __N_ALTERNATIVE(oldinst, newinst, flag) \
+740: \
+ oldinst ; \
+741: \
+ .skip -(((744f-743f)-(741b-740b)) > 0) * ((744f-743f)-(741b-740b)),0x90 ;\
+742: \
+ .pushsection .altinstructions,"a" ; \
+ altinstr_entry 740b,743f,flag,742b-740b,744f-743f ; \
+ .popsection ; \
+ .pushsection .altinstr_replacement,"ax" ; \
+743: \
+ newinst ; \
+744: \
+ .popsection ;
+
+
+.macro N_ALTERNATIVE oldinstr, newinstr, ft_flags
+ __N_ALTERNATIVE(\oldinstr, \newinstr, \ft_flags)
+.endm
+
+
#define old_len 141b-140b
#define new_len1 144f-143f
#define new_len2 145f-144f
@@ -417,7 +516,6 @@ void nop_func(void);
#define alt_max_2(a, b) ((a) ^ (((a) ^ (b)) & -(-((a) < (b)))))
#define alt_max_3(a, b, c) (alt_max_2(alt_max_2(a, b), c))

-
/*
* Same as ALTERNATIVE macro above but for two alternatives. If CPU
* has @feature1, it replaces @oldinstr with @newinstr1. If CPU has
@@ -445,6 +543,11 @@ void nop_func(void);
.popsection
.endm

+.macro N_ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
+ __N_ALTERNATIVE(__N_ALTERNATIVE(\oldinstr, \newinstr1, \ft_flags1),
+ \newinstr2, \ft_flags2)
+.endm
+
.macro ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
140:
\oldinstr
@@ -470,6 +573,11 @@ void nop_func(void);
.popsection
.endm

+.macro N_ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
+ __N_ALTERNATIVE(N_ALTERNATIVE_2(\oldinstr, \newinstr1, \ft_flags1, \newinstr2, \ft_flags2),
+ \newinstr3, \ft_flags3)
+.endm
+
/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
#define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
ALTERNATIVE_2 oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
diff --git a/arch/x86/kernel/alternative.c b/arch/x86/kernel/alternative.c
index 89de61243272..37596a417094 100644
--- a/arch/x86/kernel/alternative.c
+++ b/arch/x86/kernel/alternative.c
@@ -432,6 +432,11 @@ static int alt_replace_call(u8 *instr, u8 *insn_buff, struct alt_instr *a)
return 5;
}

+static inline u8 * instr_va(struct alt_instr *i)
+{
+ return (u8 *)&i->instr_offset + i->instr_offset;
+}
+
/*
* Replace instructions with better alternatives for this CPU type. This runs
* before SMP is initialized to avoid SMP problems with self modifying code.
@@ -447,7 +452,7 @@ void __init_or_module noinline apply_alternatives(struct alt_instr *start,
{
u8 insn_buff[MAX_PATCH_LEN];
u8 *instr, *replacement;
- struct alt_instr *a;
+ struct alt_instr *a, *b;

DPRINTK(ALT, "alt table %px, -> %px", start, end);

@@ -473,7 +478,18 @@ void __init_or_module noinline apply_alternatives(struct alt_instr *start,
for (a = start; a < end; a++) {
int insn_buff_sz = 0;

- instr = (u8 *)&a->instr_offset + a->instr_offset;
+ /*
+ * In case of nested ALTERNATIVE()s the outer alternative might
+ * add more padding. To ensure consistent patching find the max
+ * padding for all alt_instr entries for this site (nested
+ * alternatives result in consecutive entries).
+ */
+ for (b = a+1; b < end && instr_va(b) == instr_va(a); b++) {
+ u8 len = max(a->instrlen, b->instrlen);
+ a->instrlen = b->instrlen = len;
+ }
+
+ instr = instr_va(a);
replacement = (u8 *)&a->repl_offset + a->repl_offset;
BUG_ON(a->instrlen > sizeof(insn_buff));
BUG_ON(a->cpuid >= (NCAPINTS + NBUGINTS) * 32);
diff --git a/tools/objtool/arch/x86/special.c b/tools/objtool/arch/x86/special.c
index 4134d27c696b..4ea0f9815fda 100644
--- a/tools/objtool/arch/x86/special.c
+++ b/tools/objtool/arch/x86/special.c
@@ -9,6 +9,29 @@

void arch_handle_alternative(unsigned short feature, struct special_alt *alt)
{
+ static struct special_alt *group, *prev;
+
+ /*
+ * Recompute orig_len for nested ALTERNATIVE()s.
+ */
+ if (group && group->orig_sec == alt->orig_sec &&
+ group->orig_off == alt->orig_off) {
+
+ struct special_alt *iter = group;
+ for (;;) {
+ unsigned int len = max(iter->orig_len, alt->orig_len);
+ iter->orig_len = alt->orig_len = len;
+
+ if (iter == prev)
+ break;
+
+ iter = list_next_entry(iter, list);
+ }
+
+ } else group = alt;
+
+ prev = alt;
+
switch (feature) {
case X86_FEATURE_SMAP:
/*
diff --git a/tools/objtool/special.c b/tools/objtool/special.c
index 91b1950f5bd8..097a69db82a0 100644
--- a/tools/objtool/special.c
+++ b/tools/objtool/special.c
@@ -84,6 +84,14 @@ static int get_alt_entry(struct elf *elf, const struct special_entry *entry,
entry->new_len);
}

+ orig_reloc = find_reloc_by_dest(elf, sec, offset + entry->orig);
+ if (!orig_reloc) {
+ WARN_FUNC("can't find orig reloc", sec, offset + entry->orig);
+ return -1;
+ }
+
+ reloc_to_sec_off(orig_reloc, &alt->orig_sec, &alt->orig_off);
+
if (entry->feature) {
unsigned short feature;

@@ -94,14 +102,6 @@ static int get_alt_entry(struct elf *elf, const struct special_entry *entry,
arch_handle_alternative(feature, alt);
}

- orig_reloc = find_reloc_by_dest(elf, sec, offset + entry->orig);
- if (!orig_reloc) {
- WARN_FUNC("can't find orig reloc", sec, offset + entry->orig);
- return -1;
- }
-
- reloc_to_sec_off(orig_reloc, &alt->orig_sec, &alt->orig_off);
-
if (!entry->group || alt->new_len) {
new_reloc = find_reloc_by_dest(elf, sec, offset + entry->new);
if (!new_reloc) {
--
2.43.0


2024-05-31 12:36:44

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 05/14] x86/alternative: Convert alternative_input()

From: "Borislav Petkov (AMD)" <[email protected]>

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 6 +-----
1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 1dd445c6e2e1..7f5f26fc42a9 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -330,7 +330,7 @@ static inline int alternatives_text_reserved(void *start, void *end)
* Leaving an unused argument 0 to keep API compatibility.
*/
#define alternative_input(oldinstr, newinstr, ft_flags, input...) \
- asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) \
+ asm_inline volatile(N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
: : "i" (0), ## input)

/* Like alternative_input, but with a single output argument */
@@ -348,10 +348,6 @@ static inline int alternatives_text_reserved(void *start, void *end)
asm_inline volatile (ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags) \
: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)

-#define n_alternative_input(oldinstr, newinstr, ft_flags, input...) \
- asm_inline volatile (N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
- : : "i" (0), ## input)
-
#define n_alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
asm_inline volatile (N_ALTERNATIVE("call %P[old]", "call %P[new]", ft_flags) \
: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
--
2.43.0


2024-05-31 12:36:55

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 04/14] x86/alternative: Convert alternative_2()

From: "Borislav Petkov (AMD)" <[email protected]>

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 3 ---
1 file changed, 3 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index c622ec7c4462..1dd445c6e2e1 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -319,9 +319,6 @@ static inline int alternatives_text_reserved(void *start, void *end)
asm_inline volatile(N_ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")

#define alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
- asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")
-
-#define n_alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
asm_inline volatile(N_ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")

/*
--
2.43.0


2024-05-31 12:37:07

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 06/14] x86/alternative: Convert alternative_io()

From: "Borislav Petkov (AMD)" <[email protected]>

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 7 +------
1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 7f5f26fc42a9..8a0a6ba4b741 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -335,14 +335,9 @@ static inline int alternatives_text_reserved(void *start, void *end)

/* Like alternative_input, but with a single output argument */
#define alternative_io(oldinstr, newinstr, ft_flags, output, input...) \
- asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) \
+ asm_inline volatile(N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
: output : "i" (0), ## input)

-#define n_alternative_io(oldinstr, newinstr, ft_flags, output, input...) \
- asm_inline volatile (N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
- : output : "i" (0), ## input)
-
-
/* Like alternative_io, but for replacing a direct call with another one. */
#define alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
asm_inline volatile (ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags) \
--
2.43.0


2024-05-31 12:37:17

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 07/14] x86/alternative: Convert alternative_call()

From: "Borislav Petkov (AMD)" <[email protected]>

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 6 +-----
1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 8a0a6ba4b741..bc696c60848d 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -340,11 +340,7 @@ static inline int alternatives_text_reserved(void *start, void *end)

/* Like alternative_io, but for replacing a direct call with another one. */
#define alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
- asm_inline volatile (ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags) \
- : output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
-
-#define n_alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
- asm_inline volatile (N_ALTERNATIVE("call %P[old]", "call %P[new]", ft_flags) \
+ asm_inline volatile(N_ALTERNATIVE("call %P[old]", "call %P[new]", ft_flags) \
: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)

/*
--
2.43.0


2024-05-31 12:37:22

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 08/14] x86/alternative: Convert alternative_call_2()

From: "Borislav Petkov (AMD)" <[email protected]>

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 21 ++++++---------------
1 file changed, 6 insertions(+), 15 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index bc696c60848d..bad1558d6382 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -349,23 +349,14 @@ static inline int alternatives_text_reserved(void *start, void *end)
* Otherwise, if CPU has feature1, function1 is used.
* Otherwise, old function is used.
*/
-#define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
- output, input...) \
- asm_inline volatile (ALTERNATIVE_2("call %c[old]", "call %c[new1]", ft_flags1, \
- "call %c[new2]", ft_flags2) \
- : output, ASM_CALL_CONSTRAINT \
- : [old] "i" (oldfunc), [new1] "i" (newfunc1), \
+#define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
+ output, input...) \
+ asm_inline volatile(N_ALTERNATIVE_2("call %P[old]", "call %P[new1]", ft_flags1, \
+ "call %P[new2]", ft_flags2) \
+ : output, ASM_CALL_CONSTRAINT \
+ : [old] "i" (oldfunc), [new1] "i" (newfunc1), \
[new2] "i" (newfunc2), ## input)

-#define n_alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
- output, input...) \
- asm_inline volatile (N_ALTERNATIVE_2("call %P[old]", "call %P[new1]", ft_flags1,\
- "call %P[new2]", ft_flags2) \
- : output, ASM_CALL_CONSTRAINT \
- : [old] "i" (oldfunc), [new1] "i" (newfunc1), \
- [new2] "i" (newfunc2), ## input)
-
-
/*
* use this macro(s) if you need more than one output parameter
* in alternative_io
--
2.43.0


2024-05-31 12:38:12

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 09/14] x86/alternative: Convert ALTERNATIVE_TERNARY()

From: "Borislav Petkov (AMD)" <[email protected]>

The C macro.

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 6 ------
1 file changed, 6 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index bad1558d6382..73ee18705ef1 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -274,15 +274,9 @@ static inline int alternatives_text_reserved(void *start, void *end)

/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
#define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
- ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
- newinstr_yes, ft_flags)
-
-/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
-#define N_ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
N_ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
newinstr_yes, ft_flags)

-
#define ALTERNATIVE_3(oldinsn, newinsn1, ft_flags1, newinsn2, ft_flags2, \
newinsn3, ft_flags3) \
OLDINSTR_3(oldinsn, 1, 2, 3) \
--
2.43.0


2024-05-31 12:38:20

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 10/14] x86/alternative: Convert ALTERNATIVE_3()

From: "Borislav Petkov (AMD)" <[email protected]>

Fixup label numbering too as the new macros have new label numbers.

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 24 ++++--------------------
arch/x86/kernel/fpu/xstate.h | 4 ++--
2 files changed, 6 insertions(+), 22 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 73ee18705ef1..0df99855e003 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -277,26 +277,10 @@ static inline int alternatives_text_reserved(void *start, void *end)
N_ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
newinstr_yes, ft_flags)

-#define ALTERNATIVE_3(oldinsn, newinsn1, ft_flags1, newinsn2, ft_flags2, \
- newinsn3, ft_flags3) \
- OLDINSTR_3(oldinsn, 1, 2, 3) \
- ".pushsection .altinstructions,\"a\"\n" \
- ALTINSTR_ENTRY(ft_flags1, 1) \
- ALTINSTR_ENTRY(ft_flags2, 2) \
- ALTINSTR_ENTRY(ft_flags3, 3) \
- ".popsection\n" \
- ".pushsection .altinstr_replacement, \"ax\"\n" \
- ALTINSTR_REPLACEMENT(newinsn1, 1) \
- ALTINSTR_REPLACEMENT(newinsn2, 2) \
- ALTINSTR_REPLACEMENT(newinsn3, 3) \
- ".popsection\n"
-
-
-#define N_ALTERNATIVE_3(oldinst, newinst1, flag1, newinst2, flag2, \
- newinst3, flag3) \
- N_ALTERNATIVE(N_ALTERNATIVE_2(oldinst, newinst1, flag1, newinst2, flag2), \
- newinst3, flag3)
-
+#define ALTERNATIVE_3(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, \
+ newinstr3, ft_flags3) \
+ N_ALTERNATIVE(N_ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2), \
+ newinstr3, ft_flags3)
/*
* Alternative instructions for different CPU types or capabilities.
*
diff --git a/arch/x86/kernel/fpu/xstate.h b/arch/x86/kernel/fpu/xstate.h
index 05df04f39628..4fe8501efc6c 100644
--- a/arch/x86/kernel/fpu/xstate.h
+++ b/arch/x86/kernel/fpu/xstate.h
@@ -108,7 +108,7 @@ static inline u64 xfeatures_mask_independent(void)
*
* We use XSAVE as a fallback.
*
- * The 661 label is defined in the ALTERNATIVE* macros as the address of the
+ * The 771 label is defined in the ALTERNATIVE* macros as the address of the
* original instruction which gets replaced. We need to use it here as the
* address of the instruction where we might get an exception at.
*/
@@ -120,7 +120,7 @@ static inline u64 xfeatures_mask_independent(void)
"\n" \
"xor %[err], %[err]\n" \
"3:\n" \
- _ASM_EXTABLE_TYPE_REG(661b, 3b, EX_TYPE_EFAULT_REG, %[err]) \
+ _ASM_EXTABLE_TYPE_REG(771b, 3b, EX_TYPE_EFAULT_REG, %[err]) \
: [err] "=r" (err) \
: "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \
: "memory")
--
2.43.0


2024-05-31 12:38:29

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 11/14] x86/alternative: Convert the asm ALTERNATIVE() macro

From: "Borislav Petkov (AMD)" <[email protected]>

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 22 +---------------------
1 file changed, 1 insertion(+), 21 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 0df99855e003..4b17267f3f2f 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -413,24 +413,6 @@ void nop_func(void);
* @newinstr. ".skip" directive takes care of proper instruction padding
* in case @newinstr is longer than @oldinstr.
*/
-.macro ALTERNATIVE oldinstr, newinstr, ft_flags
-140:
- \oldinstr
-141:
- .skip -(((144f-143f)-(141b-140b)) > 0) * ((144f-143f)-(141b-140b)),0x90
-142:
-
- .pushsection .altinstructions,"a"
- altinstr_entry 140b,143f,\ft_flags,142b-140b,144f-143f
- .popsection
-
- .pushsection .altinstr_replacement,"ax"
-143:
- \newinstr
-144:
- .popsection
-.endm
-
#define __N_ALTERNATIVE(oldinst, newinst, flag) \
740: \
oldinst ; \
@@ -446,12 +428,10 @@ void nop_func(void);
744: \
.popsection ;

-
-.macro N_ALTERNATIVE oldinstr, newinstr, ft_flags
+.macro ALTERNATIVE oldinstr, newinstr, ft_flags
__N_ALTERNATIVE(\oldinstr, \newinstr, \ft_flags)
.endm

-
#define old_len 141b-140b
#define new_len1 144f-143f
#define new_len2 145f-144f
--
2.43.0


2024-05-31 12:38:33

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 13/14] x86/alternative: Convert the asm ALTERNATIVE_3() macro

From: "Borislav Petkov (AMD)" <[email protected]>

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 25 -------------------------
1 file changed, 25 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 82a79e17e952..19b574331ffd 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -457,31 +457,6 @@ void nop_func(void);
.endm

.macro ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
-140:
- \oldinstr
-141:
- .skip -((alt_max_3(new_len1, new_len2, new_len3) - (old_len)) > 0) * \
- (alt_max_3(new_len1, new_len2, new_len3) - (old_len)),0x90
-142:
-
- .pushsection .altinstructions,"a"
- altinstr_entry 140b,143f,\ft_flags1,142b-140b,144f-143f
- altinstr_entry 140b,144f,\ft_flags2,142b-140b,145f-144f
- altinstr_entry 140b,145f,\ft_flags3,142b-140b,146f-145f
- .popsection
-
- .pushsection .altinstr_replacement,"ax"
-143:
- \newinstr1
-144:
- \newinstr2
-145:
- \newinstr3
-146:
- .popsection
-.endm
-
-.macro N_ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
__N_ALTERNATIVE(N_ALTERNATIVE_2(\oldinstr, \newinstr1, \ft_flags1, \newinstr2, \ft_flags2),
\newinstr3, \ft_flags3)
.endm
--
2.43.0


2024-05-31 12:38:35

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 12/14] x86/alternative: Convert the asm ALTERNATIVE_2() macro

From: "Borislav Petkov (AMD)" <[email protected]>

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 22 ----------------------
1 file changed, 22 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 4b17267f3f2f..82a79e17e952 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -452,28 +452,6 @@ void nop_func(void);
* @feature2, it replaces @oldinstr with @feature2.
*/
.macro ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
-140:
- \oldinstr
-141:
- .skip -((alt_max_2(new_len1, new_len2) - (old_len)) > 0) * \
- (alt_max_2(new_len1, new_len2) - (old_len)),0x90
-142:
-
- .pushsection .altinstructions,"a"
- altinstr_entry 140b,143f,\ft_flags1,142b-140b,144f-143f
- altinstr_entry 140b,144f,\ft_flags2,142b-140b,145f-144f
- .popsection
-
- .pushsection .altinstr_replacement,"ax"
-143:
- \newinstr1
-144:
- \newinstr2
-145:
- .popsection
-.endm
-
-.macro N_ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
__N_ALTERNATIVE(__N_ALTERNATIVE(\oldinstr, \newinstr1, \ft_flags1),
\newinstr2, \ft_flags2)
.endm
--
2.43.0


2024-05-31 12:39:21

by Borislav Petkov

[permalink] [raw]
Subject: [PATCH 14/14] x86/alternative: Replace the old macros

From: "Borislav Petkov (AMD)" <[email protected]>

Now that the new macros have been gradually put in place, replace the
old ones. Leave the new label numbers starting at 7xx as a hint that the
new nested alternatives are being used now.

Signed-off-by: Borislav Petkov (AMD) <[email protected]>
---
arch/x86/include/asm/alternative.h | 144 +++++++----------------------
arch/x86/kernel/fpu/xstate.h | 2 +-
2 files changed, 33 insertions(+), 113 deletions(-)

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 19b574331ffd..f02867092a49 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -156,131 +156,51 @@ static inline int alternatives_text_reserved(void *start, void *end)

#define ALT_CALL_INSTR "call BUG_func"

-#define b_replacement(num) "664"#num
-#define e_replacement(num) "665"#num
+#define alt_slen "772b-771b"
+#define alt_total_slen "773b-771b"
+#define alt_rlen "775f-774f"

-#define alt_end_marker "663"
-#define alt_slen "662b-661b"
-#define n_alt_slen "772b-771b"
-
-#define alt_total_slen alt_end_marker"b-661b"
-#define n_alt_total_slen "773b-771b"
-
-#define alt_rlen(num) e_replacement(num)"f-"b_replacement(num)"f"
-#define n_alt_rlen "775f-774f"
-
-#define OLDINSTR(oldinstr, num) \
- "# ALT: oldnstr\n" \
- "661:\n\t" oldinstr "\n662:\n" \
- "# ALT: padding\n" \
- ".skip -(((" alt_rlen(num) ")-(" alt_slen ")) > 0) * " \
- "((" alt_rlen(num) ")-(" alt_slen ")),0x90\n" \
- alt_end_marker ":\n"
-
-#define N_OLDINSTR(oldinstr) \
- "# N_ALT: oldinstr\n" \
+#define OLDINSTR(oldinstr) \
+ "# ALT: oldinstr\n" \
"771:\n\t" oldinstr "\n772:\n" \
- "# N_ALT: padding\n" \
- ".skip -(((" n_alt_rlen ")-(" n_alt_slen ")) > 0) * " \
- "((" n_alt_rlen ")-(" n_alt_slen ")),0x90\n" \
+ "# ALT: padding\n" \
+ ".skip -(((" alt_rlen ")-(" alt_slen ")) > 0) * " \
+ "((" alt_rlen ")-(" alt_slen ")),0x90\n" \
"773:\n"

-/*
- * gas compatible max based on the idea from:
- * http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
- *
- * The additional "-" is needed because gas uses a "true" value of -1.
- */
-#define alt_max_short(a, b) "((" a ") ^ (((" a ") ^ (" b ")) & -(-((" a ") < (" b ")))))"
-
-/*
- * Pad the second replacement alternative with additional NOPs if it is
- * additionally longer than the first replacement alternative.
- */
-#define OLDINSTR_2(oldinstr, num1, num2) \
- "# ALT: oldinstr2\n" \
- "661:\n\t" oldinstr "\n662:\n" \
- "# ALT: padding2\n" \
- ".skip -((" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")) > 0) * " \
- "(" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")), 0x90\n" \
- alt_end_marker ":\n"
-
-#define OLDINSTR_3(oldinsn, n1, n2, n3) \
- "# ALT: oldinstr3\n" \
- "661:\n\t" oldinsn "\n662:\n" \
- "# ALT: padding3\n" \
- ".skip -((" alt_max_short(alt_max_short(alt_rlen(n1), alt_rlen(n2)), alt_rlen(n3)) \
- " - (" alt_slen ")) > 0) * " \
- "(" alt_max_short(alt_max_short(alt_rlen(n1), alt_rlen(n2)), alt_rlen(n3)) \
- " - (" alt_slen ")), 0x90\n" \
- alt_end_marker ":\n"
-
-#define ALTINSTR_ENTRY(ft_flags, num) \
- " .long 661b - .\n" /* label */ \
- " .long " b_replacement(num)"f - .\n" /* new instruction */ \
- " .4byte " __stringify(ft_flags) "\n" /* feature + flags */ \
- " .byte " alt_total_slen "\n" /* source len */ \
- " .byte " alt_rlen(num) "\n" /* replacement len */
-
-#define N_ALTINSTR_ENTRY(ft_flags) \
+#define ALTINSTR_ENTRY(ft_flags) \
".pushsection .altinstructions,\"a\"\n" \
" .long 771b - .\n" /* label */ \
" .long 774f - .\n" /* new instruction */ \
" .4byte " __stringify(ft_flags) "\n" /* feature + flags */ \
- " .byte " n_alt_total_slen "\n" /* source len */ \
- " .byte " n_alt_rlen "\n" /* replacement len */ \
+ " .byte " alt_total_slen "\n" /* source len */ \
+ " .byte " alt_rlen "\n" /* replacement len */ \
".popsection\n"

-#define ALTINSTR_REPLACEMENT(newinstr, num) /* replacement */ \
- "# ALT: replacement " #num "\n" \
- b_replacement(num)":\n\t" newinstr "\n" e_replacement(num) ":\n"
-
-#define N_ALTINSTR_REPLACEMENT(newinstr) /* replacement */ \
- ".pushsection .altinstr_replacement, \"ax\"\n" \
- "# N_ALT: replacement\n" \
- "774:\n\t" newinstr "\n775:\n" \
+#define ALTINSTR_REPLACEMENT(newinstr) /* replacement */ \
+ ".pushsection .altinstr_replacement, \"ax\"\n" \
+ "# ALT: replacement\n" \
+ "774:\n\t" newinstr "\n775:\n" \
".popsection\n"

/* alternative assembly primitive: */
#define ALTERNATIVE(oldinstr, newinstr, ft_flags) \
- OLDINSTR(oldinstr, 1) \
- ".pushsection .altinstructions,\"a\"\n" \
- ALTINSTR_ENTRY(ft_flags, 1) \
- ".popsection\n" \
- ".pushsection .altinstr_replacement, \"ax\"\n" \
- ALTINSTR_REPLACEMENT(newinstr, 1) \
- ".popsection\n"
-
-/* Nested alternatives macro variant */
-#define N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
- N_OLDINSTR(oldinstr) \
- N_ALTINSTR_ENTRY(ft_flags) \
- N_ALTINSTR_REPLACEMENT(newinstr)
+ OLDINSTR(oldinstr) \
+ ALTINSTR_ENTRY(ft_flags) \
+ ALTINSTR_REPLACEMENT(newinstr)

#define ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
- OLDINSTR_2(oldinstr, 1, 2) \
- ".pushsection .altinstructions,\"a\"\n" \
- ALTINSTR_ENTRY(ft_flags1, 1) \
- ALTINSTR_ENTRY(ft_flags2, 2) \
- ".popsection\n" \
- ".pushsection .altinstr_replacement, \"ax\"\n" \
- ALTINSTR_REPLACEMENT(newinstr1, 1) \
- ALTINSTR_REPLACEMENT(newinstr2, 2) \
- ".popsection\n"
-
-#define N_ALTERNATIVE_2(oldinst, newinst1, flag1, newinst2, flag2) \
- N_ALTERNATIVE(N_ALTERNATIVE(oldinst, newinst1, flag1), \
- newinst2, flag2)
+ ALTERNATIVE(ALTERNATIVE(oldinstr, newinstr1, ft_flags1), newinstr2, ft_flags2)

/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
#define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
- N_ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
- newinstr_yes, ft_flags)
+ ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, newinstr_yes, ft_flags)

#define ALTERNATIVE_3(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, \
newinstr3, ft_flags3) \
- N_ALTERNATIVE(N_ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2), \
+ ALTERNATIVE(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2), \
newinstr3, ft_flags3)
+
/*
* Alternative instructions for different CPU types or capabilities.
*
@@ -294,10 +214,10 @@ static inline int alternatives_text_reserved(void *start, void *end)
* without volatile and memory clobber.
*/
#define alternative(oldinstr, newinstr, ft_flags) \
- asm_inline volatile(N_ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
+ asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")

#define alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
- asm_inline volatile(N_ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")
+ asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")

/*
* Alternative inline assembly with input.
@@ -308,17 +228,17 @@ static inline int alternatives_text_reserved(void *start, void *end)
* Leaving an unused argument 0 to keep API compatibility.
*/
#define alternative_input(oldinstr, newinstr, ft_flags, input...) \
- asm_inline volatile(N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
+ asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) \
: : "i" (0), ## input)

/* Like alternative_input, but with a single output argument */
#define alternative_io(oldinstr, newinstr, ft_flags, output, input...) \
- asm_inline volatile(N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
+ asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) \
: output : "i" (0), ## input)

/* Like alternative_io, but for replacing a direct call with another one. */
#define alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
- asm_inline volatile(N_ALTERNATIVE("call %P[old]", "call %P[new]", ft_flags) \
+ asm_inline volatile(ALTERNATIVE("call %P[old]", "call %P[new]", ft_flags) \
: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)

/*
@@ -329,7 +249,7 @@ static inline int alternatives_text_reserved(void *start, void *end)
*/
#define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
output, input...) \
- asm_inline volatile(N_ALTERNATIVE_2("call %P[old]", "call %P[new1]", ft_flags1, \
+ asm_inline volatile(ALTERNATIVE_2("call %P[old]", "call %P[new1]", ft_flags1, \
"call %P[new2]", ft_flags2) \
: output, ASM_CALL_CONSTRAINT \
: [old] "i" (oldfunc), [new1] "i" (newfunc1), \
@@ -413,7 +333,7 @@ void nop_func(void);
* @newinstr. ".skip" directive takes care of proper instruction padding
* in case @newinstr is longer than @oldinstr.
*/
-#define __N_ALTERNATIVE(oldinst, newinst, flag) \
+#define __ALTERNATIVE(oldinst, newinst, flag) \
740: \
oldinst ; \
741: \
@@ -429,7 +349,7 @@ void nop_func(void);
.popsection ;

.macro ALTERNATIVE oldinstr, newinstr, ft_flags
- __N_ALTERNATIVE(\oldinstr, \newinstr, \ft_flags)
+ __ALTERNATIVE(\oldinstr, \newinstr, \ft_flags)
.endm

#define old_len 141b-140b
@@ -452,12 +372,12 @@ void nop_func(void);
* @feature2, it replaces @oldinstr with @feature2.
*/
.macro ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
- __N_ALTERNATIVE(__N_ALTERNATIVE(\oldinstr, \newinstr1, \ft_flags1),
+ __ALTERNATIVE(__ALTERNATIVE(\oldinstr, \newinstr1, \ft_flags1),
\newinstr2, \ft_flags2)
.endm

.macro ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
- __N_ALTERNATIVE(N_ALTERNATIVE_2(\oldinstr, \newinstr1, \ft_flags1, \newinstr2, \ft_flags2),
+ __ALTERNATIVE(ALTERNATIVE_2(\oldinstr, \newinstr1, \ft_flags1, \newinstr2, \ft_flags2),
\newinstr3, \ft_flags3)
.endm

diff --git a/arch/x86/kernel/fpu/xstate.h b/arch/x86/kernel/fpu/xstate.h
index 4fe8501efc6c..70903c12a911 100644
--- a/arch/x86/kernel/fpu/xstate.h
+++ b/arch/x86/kernel/fpu/xstate.h
@@ -134,7 +134,7 @@ static inline u64 xfeatures_mask_independent(void)
XRSTORS, X86_FEATURE_XSAVES) \
"\n" \
"3:\n" \
- _ASM_EXTABLE_TYPE(661b, 3b, EX_TYPE_FPU_RESTORE) \
+ _ASM_EXTABLE_TYPE(771b, 3b, EX_TYPE_FPU_RESTORE) \
: \
: "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \
: "memory")
--
2.43.0


2024-05-31 14:31:04

by Uros Bizjak

[permalink] [raw]
Subject: Re: [PATCH 02/14] x86/alternatives: Add nested alternatives macros


> From: Peter Zijlstra <[email protected]>
>
> Instead of making increasingly complicated ALTERNATIVE_n()
> implementations, use a nested alternative expression.
>
> The only difference between:
>
> ALTERNATIVE_2(oldinst, newinst1, flag1, newinst2, flag2)
>
> and
>
> ALTERNATIVE(ALTERNATIVE(oldinst, newinst1, flag1),
> newinst2, flag2)
>
> is that the outer alternative can add additional padding when the inner
> alternative is the shorter one, which then results in
> alt_instr::instrlen being inconsistent.
>
> However, this is easily remedied since the alt_instr entries will be
> consecutive and it is trivial to compute the max(alt_instr::instrlen) at
> runtime while patching.
>
> Specifically, after this the ALTERNATIVE_2 macro, after CPP expansion
> (and manual layout), looks like this:
>
> .macro ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
> 740:
> 740: \oldinstr ;
> 741: .skip -(((744f-743f)-(741b-740b)) > 0) * ((744f-743f)-(741b-740b)),0x90 ;
> 742: .pushsection .altinstructions,"a" ;
> altinstr_entry 740b,743f,\ft_flags1,742b-740b,744f-743f ;
> .popsection ;
> .pushsection .altinstr_replacement,"ax" ;
> 743: \newinstr1 ;
> 744: .popsection ; ;
> 741: .skip -(((744f-743f)-(741b-740b)) > 0) * ((744f-743f)-(741b-740b)),0x90 ;
> 742: .pushsection .altinstructions,"a" ;
> altinstr_entry 740b,743f,\ft_flags2,742b-740b,744f-743f ;
> .popsection ;
> .pushsection .altinstr_replacement,"ax" ;
> 743: \newinstr2 ;
> 744: .popsection ;
> .endm
>
> The only label that is ambiguous is 740, however they all reference the
> same spot, so that doesn't matter.
>
> NOTE: obviously only @oldinstr may be an alternative; making @newinstr
> an alternative would mean patching .altinstr_replacement which very
> likely isn't what is intended, also the labels will be confused in that
> case.
>
> [ bp: Debug an issue where it would match the wrong two insns
>   and consider them nested due to the same signed offsets in the
> .alternative section and use instr_va() to compare the full virtual
> addresses instead.
>
> - Use new labels to denote that the new, nested
> alternatives are being used when staring at preprocessed output. ]
>
> Signed-off-by: Peter Zijlstra <[email protected]>
> Co-developed-by: Borislav Petkov (AMD) <[email protected]>
> Signed-off-by: Borislav Petkov (AMD) <[email protected]>
> Link: https://lkml.kernel.org/r/[email protected]
> ---
> arch/x86/include/asm/alternative.h | 110 ++++++++++++++++++++++++++++-
> arch/x86/kernel/alternative.c | 20 +++++-
> tools/objtool/arch/x86/special.c | 23 ++++++
> tools/objtool/special.c | 16 ++---
> 4 files changed, 158 insertions(+), 11 deletions(-)
>
> diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
> index 6db78909180a..8554c2c339ff 100644
> --- a/arch/x86/include/asm/alternative.h
> +++ b/arch/x86/include/asm/alternative.h
> @@ -161,8 +161,13 @@ static inline int alternatives_text_reserved(void *start, void *end)
>
> #define alt_end_marker "663"
> #define alt_slen "662b-661b"
> +#define n_alt_slen "772b-771b"
> +
> #define alt_total_slen alt_end_marker"b-661b"
> +#define n_alt_total_slen "773b-771b"
> +
> #define alt_rlen(num) e_replacement(num)"f-"b_replacement(num)"f"
> +#define n_alt_rlen "775f-774f"
>
> #define OLDINSTR(oldinstr, num) \
> "# ALT: oldnstr\n" \
> @@ -172,6 +177,14 @@ static inline int alternatives_text_reserved(void *start, void *end)
> "((" alt_rlen(num) ")-(" alt_slen ")),0x90\n" \
> alt_end_marker ":\n"
>
> +#define N_OLDINSTR(oldinstr) \
> + "# N_ALT: oldinstr\n" \
> + "771:\n\t" oldinstr "\n772:\n" \
> + "# N_ALT: padding\n" \
> + ".skip -(((" n_alt_rlen ")-(" n_alt_slen ")) > 0) * " \
> + "((" n_alt_rlen ")-(" n_alt_slen ")),0x90\n" \
> + "773:\n"
> +
> /*
> * gas compatible max based on the idea from:
> * http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
> @@ -209,10 +222,25 @@ static inline int alternatives_text_reserved(void *start, void *end)
> " .byte " alt_total_slen "\n" /* source len */ \
> " .byte " alt_rlen(num) "\n" /* replacement len */
>
> +#define N_ALTINSTR_ENTRY(ft_flags) \
> + ".pushsection .altinstructions,\"a\"\n" \
> + " .long 771b - .\n" /* label */ \
> + " .long 774f - .\n" /* new instruction */ \
> + " .4byte " __stringify(ft_flags) "\n" /* feature + flags */ \
> + " .byte " n_alt_total_slen "\n" /* source len */ \
> + " .byte " n_alt_rlen "\n" /* replacement len */ \
> + ".popsection\n"
> +
> #define ALTINSTR_REPLACEMENT(newinstr, num) /* replacement */ \
> "# ALT: replacement " #num "\n" \
> b_replacement(num)":\n\t" newinstr "\n" e_replacement(num) ":\n"
>
> +#define N_ALTINSTR_REPLACEMENT(newinstr) /* replacement */ \
> + ".pushsection .altinstr_replacement, \"ax\"\n" \
> + "# N_ALT: replacement\n" \
> + "774:\n\t" newinstr "\n775:\n" \
> + ".popsection\n"
> +
> /* alternative assembly primitive: */
> #define ALTERNATIVE(oldinstr, newinstr, ft_flags) \
> OLDINSTR(oldinstr, 1) \
> @@ -223,6 +251,12 @@ static inline int alternatives_text_reserved(void *start, void *end)
> ALTINSTR_REPLACEMENT(newinstr, 1) \
> ".popsection\n"
>
> +/* Nested alternatives macro variant */
> +#define N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
> + N_OLDINSTR(oldinstr) \
> + N_ALTINSTR_ENTRY(ft_flags) \
> + N_ALTINSTR_REPLACEMENT(newinstr)
> +
> #define ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
> OLDINSTR_2(oldinstr, 1, 2) \
> ".pushsection .altinstructions,\"a\"\n" \
> @@ -234,11 +268,21 @@ static inline int alternatives_text_reserved(void *start, void *end)
> ALTINSTR_REPLACEMENT(newinstr2, 2) \
> ".popsection\n"
>
> +#define N_ALTERNATIVE_2(oldinst, newinst1, flag1, newinst2, flag2) \
> + N_ALTERNATIVE(N_ALTERNATIVE(oldinst, newinst1, flag1), \
> + newinst2, flag2)
> +
> /* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
> #define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
> ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
> newinstr_yes, ft_flags)
>
> +/* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
> +#define N_ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
> + N_ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
> + newinstr_yes, ft_flags)
> +
> +
> #define ALTERNATIVE_3(oldinsn, newinsn1, ft_flags1, newinsn2, ft_flags2, \
> newinsn3, ft_flags3) \
> OLDINSTR_3(oldinsn, 1, 2, 3) \
> @@ -253,6 +297,12 @@ static inline int alternatives_text_reserved(void *start, void *end)
> ALTINSTR_REPLACEMENT(newinsn3, 3) \
> ".popsection\n"
>
> +
> +#define N_ALTERNATIVE_3(oldinst, newinst1, flag1, newinst2, flag2, \
> + newinst3, flag3) \
> + N_ALTERNATIVE(N_ALTERNATIVE_2(oldinst, newinst1, flag1, newinst2, flag2), \
> + newinst3, flag3)
> +
> /*
> * Alternative instructions for different CPU types or capabilities.
> *
> @@ -271,6 +321,12 @@ static inline int alternatives_text_reserved(void *start, void *end)
> #define alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
> asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")
>
> +#define n_alternative(oldinstr, newinstr, ft_flags) \
> + asm_inline volatile (N_ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
> +
> +#define n_alternative_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) \
> + asm_inline volatile(N_ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")
> +
> /*
> * Alternative inline assembly with input.
> *
> @@ -288,11 +344,24 @@ static inline int alternatives_text_reserved(void *start, void *end)
> asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, ft_flags) \
> : output : "i" (0), ## input)
>
> +#define n_alternative_io(oldinstr, newinstr, ft_flags, output, input...) \
> + asm_inline volatile (N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
> + : output : "i" (0), ## input)
> +
> +
> /* Like alternative_io, but for replacing a direct call with another one. */
> #define alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
> asm_inline volatile (ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags) \
> : output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)
>
> +#define n_alternative_input(oldinstr, newinstr, ft_flags, input...) \
> + asm_inline volatile (N_ALTERNATIVE(oldinstr, newinstr, ft_flags) \
> + : : "i" (0), ## input)
> +
> +#define n_alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
> + asm_inline volatile (N_ALTERNATIVE("call %P[old]", "call %P[new]", ft_flags) \
> + : output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)

Please don't resurrect the %P modifier, use %c instead.

> +
> /*
> * Like alternative_call, but there are two features and respective functions.
> * If CPU has feature2, function2 is used.
> @@ -307,6 +376,15 @@ static inline int alternatives_text_reserved(void *start, void *end)
> : [old] "i" (oldfunc), [new1] "i" (newfunc1), \
> [new2] "i" (newfunc2), ## input)
>
> +#define n_alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
> + output, input...) \
> + asm_inline volatile (N_ALTERNATIVE_2("call %P[old]", "call %P[new1]", ft_flags1,\
> + "call %P[new2]", ft_flags2) \
> + : output, ASM_CALL_CONSTRAINT \
> + : [old] "i" (oldfunc), [new1] "i" (newfunc1), \
> + [new2] "i" (newfunc2), ## input)

Here too, %P should be avoided in favor of %c.

Uros.

> +
> /*
> * use this macro(s) if you need more than one output parameter
> * in alternative_io
> @@ -403,6 +481,27 @@ void nop_func(void);
> .popsection
> .endm
>
> +#define __N_ALTERNATIVE(oldinst, newinst, flag) \
> +740: \
> + oldinst ; \
> +741: \
> + .skip -(((744f-743f)-(741b-740b)) > 0) * ((744f-743f)-(741b-740b)),0x90 ;\
> +742: \
> + .pushsection .altinstructions,"a" ; \
> + altinstr_entry 740b,743f,flag,742b-740b,744f-743f ; \
> + .popsection ; \
> + .pushsection .altinstr_replacement,"ax" ; \
> +743: \
> + newinst ; \
> +744: \
> + .popsection ;
> +
> +
> +.macro N_ALTERNATIVE oldinstr, newinstr, ft_flags
> + __N_ALTERNATIVE(\oldinstr, \newinstr, \ft_flags)
> +.endm
> +
> +
> #define old_len 141b-140b
> #define new_len1 144f-143f
> #define new_len2 145f-144f
> @@ -417,7 +516,6 @@ void nop_func(void);
> #define alt_max_2(a, b) ((a) ^ (((a) ^ (b)) & -(-((a) < (b)))))
> #define alt_max_3(a, b, c) (alt_max_2(alt_max_2(a, b), c))
>
> -
> /*
> * Same as ALTERNATIVE macro above but for two alternatives. If CPU
> * has @feature1, it replaces @oldinstr with @newinstr1. If CPU has
> @@ -445,6 +543,11 @@ void nop_func(void);
> .popsection
> .endm
>
> +.macro N_ALTERNATIVE_2 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2
> + __N_ALTERNATIVE(__N_ALTERNATIVE(\oldinstr, \newinstr1, \ft_flags1),
> + \newinstr2, \ft_flags2)
> +.endm
> +
> .macro ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
> 140:
> \oldinstr
> @@ -470,6 +573,11 @@ void nop_func(void);
> .popsection
> .endm
>
> +.macro N_ALTERNATIVE_3 oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, newinstr3, ft_flags3
> + __N_ALTERNATIVE(N_ALTERNATIVE_2(\oldinstr, \newinstr1, \ft_flags1, \newinstr2, \ft_flags2),
> + \newinstr3, \ft_flags3)
> +.endm
> +
> /* If @feature is set, patch in @newinstr_yes, otherwise @newinstr_no. */
> #define ALTERNATIVE_TERNARY(oldinstr, ft_flags, newinstr_yes, newinstr_no) \
> ALTERNATIVE_2 oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
> diff --git a/arch/x86/kernel/alternative.c b/arch/x86/kernel/alternative.c
> index 89de61243272..37596a417094 100644
> --- a/arch/x86/kernel/alternative.c
> +++ b/arch/x86/kernel/alternative.c
> @@ -432,6 +432,11 @@ static int alt_replace_call(u8 *instr, u8 *insn_buff, struct alt_instr *a)
> return 5;
> }
>
> +static inline u8 * instr_va(struct alt_instr *i)
> +{
> + return (u8 *)&i->instr_offset + i->instr_offset;
> +}
> +
> /*
> * Replace instructions with better alternatives for this CPU type. This runs
> * before SMP is initialized to avoid SMP problems with self modifying code.
> @@ -447,7 +452,7 @@ void __init_or_module noinline apply_alternatives(struct alt_instr *start,
> {
> u8 insn_buff[MAX_PATCH_LEN];
> u8 *instr, *replacement;
> - struct alt_instr *a;
> + struct alt_instr *a, *b;
>
> DPRINTK(ALT, "alt table %px, -> %px", start, end);
>
> @@ -473,7 +478,18 @@ void __init_or_module noinline apply_alternatives(struct alt_instr *start,
> for (a = start; a < end; a++) {
> int insn_buff_sz = 0;
>
> - instr = (u8 *)&a->instr_offset + a->instr_offset;
> + /*
> + * In case of nested ALTERNATIVE()s the outer alternative might
> + * add more padding. To ensure consistent patching find the max
> + * padding for all alt_instr entries for this site (nested
> + * alternatives result in consecutive entries).
> + */
> + for (b = a+1; b < end && instr_va(b) == instr_va(a); b++) {
> + u8 len = max(a->instrlen, b->instrlen);
> + a->instrlen = b->instrlen = len;
> + }
> +
> + instr = instr_va(a);
> replacement = (u8 *)&a->repl_offset + a->repl_offset;
> BUG_ON(a->instrlen > sizeof(insn_buff));
> BUG_ON(a->cpuid >= (NCAPINTS + NBUGINTS) * 32);
> diff --git a/tools/objtool/arch/x86/special.c b/tools/objtool/arch/x86/special.c
> index 4134d27c696b..4ea0f9815fda 100644
> --- a/tools/objtool/arch/x86/special.c
> +++ b/tools/objtool/arch/x86/special.c
> @@ -9,6 +9,29 @@
>
> void arch_handle_alternative(unsigned short feature, struct special_alt *alt)
> {
> + static struct special_alt *group, *prev;
> +
> + /*
> + * Recompute orig_len for nested ALTERNATIVE()s.
> + */
> + if (group && group->orig_sec == alt->orig_sec &&
> + group->orig_off == alt->orig_off) {
> +
> + struct special_alt *iter = group;
> + for (;;) {
> + unsigned int len = max(iter->orig_len, alt->orig_len);
> + iter->orig_len = alt->orig_len = len;
> +
> + if (iter == prev)
> + break;
> +
> + iter = list_next_entry(iter, list);
> + }
> +
> + } else group = alt;
> +
> + prev = alt;
> +
> switch (feature) {
> case X86_FEATURE_SMAP:
> /*
> diff --git a/tools/objtool/special.c b/tools/objtool/special.c
> index 91b1950f5bd8..097a69db82a0 100644
> --- a/tools/objtool/special.c
> +++ b/tools/objtool/special.c
> @@ -84,6 +84,14 @@ static int get_alt_entry(struct elf *elf, const struct special_entry *entry,
> entry->new_len);
> }
>
> + orig_reloc = find_reloc_by_dest(elf, sec, offset + entry->orig);
> + if (!orig_reloc) {
> + WARN_FUNC("can't find orig reloc", sec, offset + entry->orig);
> + return -1;
> + }
> +
> + reloc_to_sec_off(orig_reloc, &alt->orig_sec, &alt->orig_off);
> +
> if (entry->feature) {
> unsigned short feature;
>
> @@ -94,14 +102,6 @@ static int get_alt_entry(struct elf *elf, const struct special_entry *entry,
> arch_handle_alternative(feature, alt);
> }
>
> - orig_reloc = find_reloc_by_dest(elf, sec, offset + entry->orig);
> - if (!orig_reloc) {
> - WARN_FUNC("can't find orig reloc", sec, offset + entry->orig);
> - return -1;
> - }
> -
> - reloc_to_sec_off(orig_reloc, &alt->orig_sec, &alt->orig_off);
> -
> if (!entry->group || alt->new_len) {
> new_reloc = find_reloc_by_dest(elf, sec, offset + entry->new);
> if (!new_reloc) {

2024-05-31 14:34:15

by Borislav Petkov

[permalink] [raw]
Subject: Re: [PATCH 02/14] x86/alternatives: Add nested alternatives macros

On Fri, May 31, 2024 at 04:30:31PM +0200, Uros Bizjak wrote:
> Please don't resurrect the %P modifier, use %c instead.
>
> > +
> > /*
> > * Like alternative_call, but there are two features and respective functions.
> > * If CPU has feature2, function2 is used.
> > @@ -307,6 +376,15 @@ static inline int alternatives_text_reserved(void *start, void *end)
> > : [old] "i" (oldfunc), [new1] "i" (newfunc1), \
> > [new2] "i" (newfunc2), ## input)
> > +#define n_alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
> > + output, input...) \
> > + asm_inline volatile (N_ALTERNATIVE_2("call %P[old]", "call %P[new1]", ft_flags1,\
> > + "call %P[new2]", ft_flags2) \
> > + : output, ASM_CALL_CONSTRAINT \
> > + : [old] "i" (oldfunc), [new1] "i" (newfunc1), \
> > + [new2] "i" (newfunc2), ## input)
>
> Here too, %P should be avoided in favor of %c.

Yeah, will do.

We probably should update our patch checking scripts to catch such
things in the future.

/me adds to todo.

Thx.


--
Regards/Gruss,
Boris.

https://people.kernel.org/tglx/notes-about-netiquette

2024-05-31 14:38:46

by Borislav Petkov

[permalink] [raw]
Subject: Re: [PATCH 02/14] x86/alternatives: Add nested alternatives macros

On Fri, May 31, 2024 at 04:30:31PM +0200, Uros Bizjak wrote:
> Please don't resurrect the %P modifier, use %c instead.

Btw, out of curiosity, is %P being phased out?

I'm looking at

41cd2e1ee96e ("x86/asm: Use %c/%n instead of %P operand modifier in asm templates")

and yeah, %c is the generic one while %P is the x86-specific one but
phasing latter out is probably going to take a bunch of gcc releases...

Or is it something else entirely?

Thx.

--
Regards/Gruss,
Boris.

https://people.kernel.org/tglx/notes-about-netiquette

2024-05-31 15:15:12

by Uros Bizjak

[permalink] [raw]
Subject: Re: [PATCH 02/14] x86/alternatives: Add nested alternatives macros



Borislav Petkov je 31. 05. 24 ob 16:36 napisal:
> On Fri, May 31, 2024 at 04:30:31PM +0200, Uros Bizjak wrote:
>> Please don't resurrect the %P modifier, use %c instead.
>
> Btw, out of curiosity, is %P being phased out?

No ;)

> I'm looking at
>
> 41cd2e1ee96e ("x86/asm: Use %c/%n instead of %P operand modifier in asm templates")
>
> and yeah, %c is the generic one while %P is the x86-specific one but
> phasing latter out is probably going to take a bunch of gcc releases...

The intention of '%P' operand modifier is primarily to be used with PIC,
where:

'P' If used for a function, print the PLT
suffix and generate PIC code. For
example, emit 'foo@PLT' instead of 'foo'
for the function foo(). If used for a
constant, drop all syntax-specific
prefixes and issue the bare constant. See
'p' above.

the fact that it handles constants is due to historic reasons.

The '%c' operand modifier will also check its operand that it is indeed
an immediate integer operand, so should be used with 'i' operand constraint:

'c' Require a constant operand and print the
constant expression with no punctuation.

Uros.

2024-05-31 17:01:34

by Brian Gerst

[permalink] [raw]
Subject: Re: [PATCH 10/14] x86/alternative: Convert ALTERNATIVE_3()

On Fri, May 31, 2024 at 8:41 AM Borislav Petkov <[email protected]> wrote:
>
> From: "Borislav Petkov (AMD)" <[email protected]>
>
> Fixup label numbering too as the new macros have new label numbers.
>
> Signed-off-by: Borislav Petkov (AMD) <[email protected]>
> ---
> arch/x86/include/asm/alternative.h | 24 ++++--------------------
> arch/x86/kernel/fpu/xstate.h | 4 ++--
> 2 files changed, 6 insertions(+), 22 deletions(-)
>
> diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
> index 73ee18705ef1..0df99855e003 100644
> --- a/arch/x86/include/asm/alternative.h
> +++ b/arch/x86/include/asm/alternative.h
> @@ -277,26 +277,10 @@ static inline int alternatives_text_reserved(void *start, void *end)
> N_ALTERNATIVE_2(oldinstr, newinstr_no, X86_FEATURE_ALWAYS, \
> newinstr_yes, ft_flags)
>
> -#define ALTERNATIVE_3(oldinsn, newinsn1, ft_flags1, newinsn2, ft_flags2, \
> - newinsn3, ft_flags3) \
> - OLDINSTR_3(oldinsn, 1, 2, 3) \
> - ".pushsection .altinstructions,\"a\"\n" \
> - ALTINSTR_ENTRY(ft_flags1, 1) \
> - ALTINSTR_ENTRY(ft_flags2, 2) \
> - ALTINSTR_ENTRY(ft_flags3, 3) \
> - ".popsection\n" \
> - ".pushsection .altinstr_replacement, \"ax\"\n" \
> - ALTINSTR_REPLACEMENT(newinsn1, 1) \
> - ALTINSTR_REPLACEMENT(newinsn2, 2) \
> - ALTINSTR_REPLACEMENT(newinsn3, 3) \
> - ".popsection\n"
> -
> -
> -#define N_ALTERNATIVE_3(oldinst, newinst1, flag1, newinst2, flag2, \
> - newinst3, flag3) \
> - N_ALTERNATIVE(N_ALTERNATIVE_2(oldinst, newinst1, flag1, newinst2, flag2), \
> - newinst3, flag3)
> -
> +#define ALTERNATIVE_3(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2, \
> + newinstr3, ft_flags3) \
> + N_ALTERNATIVE(N_ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2), \
> + newinstr3, ft_flags3)
> /*
> * Alternative instructions for different CPU types or capabilities.
> *
> diff --git a/arch/x86/kernel/fpu/xstate.h b/arch/x86/kernel/fpu/xstate.h
> index 05df04f39628..4fe8501efc6c 100644
> --- a/arch/x86/kernel/fpu/xstate.h
> +++ b/arch/x86/kernel/fpu/xstate.h
> @@ -108,7 +108,7 @@ static inline u64 xfeatures_mask_independent(void)
> *
> * We use XSAVE as a fallback.
> *
> - * The 661 label is defined in the ALTERNATIVE* macros as the address of the
> + * The 771 label is defined in the ALTERNATIVE* macros as the address of the
> * original instruction which gets replaced. We need to use it here as the
> * address of the instruction where we might get an exception at.
> */
> @@ -120,7 +120,7 @@ static inline u64 xfeatures_mask_independent(void)
> "\n" \
> "xor %[err], %[err]\n" \
> "3:\n" \
> - _ASM_EXTABLE_TYPE_REG(661b, 3b, EX_TYPE_EFAULT_REG, %[err]) \
> + _ASM_EXTABLE_TYPE_REG(771b, 3b, EX_TYPE_EFAULT_REG, %[err]) \
> : [err] "=r" (err) \
> : "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \
> : "memory")
> --
> 2.43.0
>
>

Just add a label at the start of this macro, so it doesn't depend on
the internal labels of ALTERNATIVE(). Something like:
asm volatile("1:" ALTERNATIVE_3(XSAVE, \
...
_ASM_EXTABLE_TYPE_REG(1b, 3b, EX_TYPE_EFAULT_REG, %[err]) \
...


Brian Gerst

2024-05-31 18:09:28

by Borislav Petkov

[permalink] [raw]
Subject: Re: [PATCH 02/14] x86/alternatives: Add nested alternatives macros

On Fri, May 31, 2024 at 05:10:08PM +0200, Uros Bizjak wrote:
> The intention of '%P' operand modifier is primarily to be used with PIC,
> where ...

Ok, I'll fold in the following. I'd like to have this documented
somewhere in the tree too.

Thx.

diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index f02867092a49..4973edbe9289 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -236,9 +236,18 @@ static inline int alternatives_text_reserved(void *start, void *end)
asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) \
: output : "i" (0), ## input)

-/* Like alternative_io, but for replacing a direct call with another one. */
+/*
+ * Like alternative_io, but for replacing a direct call with another one.
+ *
+ * Use the %c operand modifier which is the generic way to print a bare
+ * constant expression with all syntax-specific punctuation omitted. %P
+ * is the x86-specific variant which can handle constants too, for
+ * historical reasons, but it should be used primarily for PIC
+ * references: i.e., if used for a function, it would add the PLT
+ * suffix.
+ */
#define alternative_call(oldfunc, newfunc, ft_flags, output, input...) \
- asm_inline volatile(ALTERNATIVE("call %P[old]", "call %P[new]", ft_flags) \
+ asm_inline volatile(ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags) \
: output : [old] "i" (oldfunc), [new] "i" (newfunc), ## input)

/*
@@ -249,8 +258,8 @@ static inline int alternatives_text_reserved(void *start, void *end)
*/
#define alternative_call_2(oldfunc, newfunc1, ft_flags1, newfunc2, ft_flags2, \
output, input...) \
- asm_inline volatile(ALTERNATIVE_2("call %P[old]", "call %P[new1]", ft_flags1, \
- "call %P[new2]", ft_flags2) \
+ asm_inline volatile(ALTERNATIVE_2("call %c[old]", "call %c[new1]", ft_flags1, \
+ "call %c[new2]", ft_flags2) \
: output, ASM_CALL_CONSTRAINT \
: [old] "i" (oldfunc), [new1] "i" (newfunc1), \
[new2] "i" (newfunc2), ## input)

--
Regards/Gruss,
Boris.

https://people.kernel.org/tglx/notes-about-netiquette

2024-06-01 09:11:21

by Borislav Petkov

[permalink] [raw]
Subject: Re: [PATCH 10/14] x86/alternative: Convert ALTERNATIVE_3()

On Fri, May 31, 2024 at 01:00:55PM -0400, Brian Gerst wrote:
> Just add a label at the start of this macro, so it doesn't depend on
> the internal labels of ALTERNATIVE(). Something like:
> asm volatile("1:" ALTERNATIVE_3(XSAVE, \
> ...
> _ASM_EXTABLE_TYPE_REG(1b, 3b, EX_TYPE_EFAULT_REG, %[err]) \
> ...

Thanks for the cool idea - that use of the asm label outside of the
macro was really nasty but I didn't think of that. Cool.

And yap, it looks good:

------
#APP
# 188 "arch/x86/kernel/fpu/xstate.h" 1
1: # ALT: oldinstr

<--- the outer label 1:

771:
# ALT: oldinstr
771:
# ALT: oldinstr
771:
.byte 0x48, 0x0f,0xae,0x27
772:
# ALT: padding
.skip -(((775f-774f)-(772b-771b)) > 0) * ((775f-774f)-(772b-771b)),0x90

...

triple-alternative gunk

...

775:
.popsection

xor %edi, %edi # err
3:
.pushsection "__ex_table","a"
------

Yap, and boots in the guest.

I'll fold in the below:

diff --git a/arch/x86/kernel/fpu/xstate.h b/arch/x86/kernel/fpu/xstate.h
index 70903c12a911..2ee0b9c53dcc 100644
--- a/arch/x86/kernel/fpu/xstate.h
+++ b/arch/x86/kernel/fpu/xstate.h
@@ -106,21 +106,17 @@ static inline u64 xfeatures_mask_independent(void)
* Otherwise, if XSAVEOPT is enabled, XSAVEOPT replaces XSAVE because XSAVEOPT
* supports modified optimization which is not supported by XSAVE.
*
- * We use XSAVE as a fallback.
- *
- * The 771 label is defined in the ALTERNATIVE* macros as the address of the
- * original instruction which gets replaced. We need to use it here as the
- * address of the instruction where we might get an exception at.
+ * Use XSAVE as a fallback.
*/
#define XSTATE_XSAVE(st, lmask, hmask, err) \
- asm volatile(ALTERNATIVE_3(XSAVE, \
+ asm volatile("1: " ALTERNATIVE_3(XSAVE, \
XSAVEOPT, X86_FEATURE_XSAVEOPT, \
XSAVEC, X86_FEATURE_XSAVEC, \
XSAVES, X86_FEATURE_XSAVES) \
"\n" \
"xor %[err], %[err]\n" \
"3:\n" \
- _ASM_EXTABLE_TYPE_REG(771b, 3b, EX_TYPE_EFAULT_REG, %[err]) \
+ _ASM_EXTABLE_TYPE_REG(1b, 3b, EX_TYPE_EFAULT_REG, %[err]) \
: [err] "=r" (err) \
: "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \
: "memory")
@@ -130,11 +126,11 @@ static inline u64 xfeatures_mask_independent(void)
* XSAVE area format.
*/
#define XSTATE_XRESTORE(st, lmask, hmask) \
- asm volatile(ALTERNATIVE(XRSTOR, \
+ asm volatile("1: " ALTERNATIVE(XRSTOR, \
XRSTORS, X86_FEATURE_XSAVES) \
"\n" \
"3:\n" \
- _ASM_EXTABLE_TYPE(771b, 3b, EX_TYPE_FPU_RESTORE) \
+ _ASM_EXTABLE_TYPE(1b, 3b, EX_TYPE_FPU_RESTORE) \
: \
: "D" (st), "m" (*st), "a" (lmask), "d" (hmask) \
: "memory")

--
Regards/Gruss,
Boris.

https://people.kernel.org/tglx/notes-about-netiquette