// SPDX-License-Identifier: GPL-2.0
#include <linux/static_call.h>
#include <linux/memory.h>
#include <linux/bug.h>
#include <asm/text-patching.h>

enum insn_type {
        CALL = 0, /* site call */
        NOP = 1,  /* site cond-call */
        JMP = 2,  /* tramp / site tail-call */
        RET = 3,  /* tramp / site cond-tail-call */
};

/*
 * ud1 %esp, %ecx - a 3 byte #UD that is unique to trampolines, chosen such
 * that there is no false-positive trampoline identification while also being a
 * speculation stop.
 */
static const u8 tramp_ud[] = { 0x0f, 0xb9, 0xcc };
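
/*
 * Byte breakdown: 0x0f 0xb9 is the two byte UD1 opcode and 0xcc is the
 * ModRM byte (mod=3, reg=%ecx, rm=%esp). Conveniently, 0xcc on its own
 * also decodes as INT3, so the marker traps even if decode starts on its
 * last byte.
 */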

/*
 * cs cs cs xorl %eax, %eax - a single 5 byte instruction that clears %[er]ax
 */
static const u8 xor5rax[] = { 0x2e, 0x2e, 0x2e, 0x31, 0xc0 };

static const u8 retinsn[] = { RET_INSN_OPCODE, 0xcc, 0xcc, 0xcc, 0xcc };
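
/*
 * Note that every site pattern used here (CALL, JMP32, the 5 byte NOP,
 * xor5rax and retinsn) is exactly CALL_INSN_SIZE bytes, which is what lets
 * __static_call_transform() below patch any one of them over any other.
 */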

static void __ref __static_call_transform(void *insn, enum insn_type type,
                                          void *func, bool modinit)
{
        const void *emulate = NULL;
        int size = CALL_INSN_SIZE;
        const void *code;

        switch (type) {
        case CALL:
                code = text_gen_insn(CALL_INSN_OPCODE, insn, func);
                if (func == &__static_call_return0) {
                        /* Patch in xor5rax; the INT3 emulates the CALL meanwhile. */
                        emulate = code;
                        code = &xor5rax;
                }
                break;

        case NOP:
                code = x86_nops[5];
                break;

        case JMP:
                code = text_gen_insn(JMP32_INSN_OPCODE, insn, func);
                break;

        case RET:
                if (cpu_feature_enabled(X86_FEATURE_RETHUNK))
                        code = text_gen_insn(JMP32_INSN_OPCODE, insn, &__x86_return_thunk);
                else
                        code = &retinsn;
                break;
        }

        if (memcmp(insn, code, size) == 0)
                return;

        if (system_state == SYSTEM_BOOTING || modinit)
                return text_poke_early(insn, code, size);

        text_poke_bp(insn, code, size, emulate);
}
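
/*
 * A sketch of the bytes text_gen_insn() emits above (see
 * asm/text-patching.h): CALL_INSN_OPCODE yields e8 <rel32> and
 * JMP32_INSN_OPCODE yields e9 <rel32>, where rel32 is computed relative to
 * the end of the 5 byte instruction: rel32 = func - (insn + CALL_INSN_SIZE).
 */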

static void __static_call_validate(void *insn, bool tail)
{
        u8 opcode = *(u8 *)insn;

        if (tail) {
                if (opcode == JMP32_INSN_OPCODE ||
                    opcode == RET_INSN_OPCODE)
                        return;
        } else {
                if (opcode == CALL_INSN_OPCODE ||
                    !memcmp(insn, x86_nops[5], 5) ||
                    !memcmp(insn, xor5rax, 5))
                        return;
        }

        /*
         * If we ever trigger this, our text is corrupt; we'll probably not live long.
         */
        WARN_ONCE(1, "unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
}

static inline enum insn_type __sc_insn(bool null, bool tail)
{
        /*
         * Encode the following table without branches:
         *
         *      tail    null    insn
         *      -----+-------+------
         *        0  |   0   |  CALL
         *        0  |   1   |  NOP
         *        1  |   0   |  JMP
         *        1  |   1   |  RET
         */
        return 2*tail + null;
}
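
/*
 * For example: updating a key to NULL at a tail-call site gives
 * __sc_insn(true, true) == RET, while updating a regular call site to a
 * real function gives __sc_insn(false, false) == CALL.
 */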

void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
{
        mutex_lock(&text_mutex);

        if (tramp) {
                __static_call_validate(tramp, true);
                __static_call_transform(tramp, __sc_insn(!func, true), func, false);
        }

        if (IS_ENABLED(CONFIG_HAVE_STATIC_CALL_INLINE) && site) {
                __static_call_validate(site, tail);
                __static_call_transform(site, __sc_insn(!func, tail), func, false);
        }

        mutex_unlock(&text_mutex);
}
EXPORT_SYMBOL_GPL(arch_static_call_transform);
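
/*
 * Example usage; a sketch assuming the generic <linux/static_call.h> API,
 * with hypothetical functions my_func() and other_func():
 *
 *      DEFINE_STATIC_CALL(my_key, my_func);
 *      ...
 *      static_call(my_key)(args);
 *      static_call_update(my_key, other_func);
 *
 * static_call_update() eventually lands in arch_static_call_transform()
 * above: once for the trampoline and, with CONFIG_HAVE_STATIC_CALL_INLINE,
 * once for each inline call site recorded for the key.
 */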

#ifdef CONFIG_RETHUNK
/*
 * This is called by apply_returns() to fix up static call trampolines,
 * specifically ARCH_DEFINE_STATIC_CALL_NULL_TRAMP which is recorded as
 * having a return trampoline.
 *
 * The problem is that static_call() is available before X86_FEATURE_RETHUNK
 * is determined and, by implication, before alternatives have run.
 *
 * This means that __static_call_transform() above can have overwritten the
 * return trampoline and we now need to fix things up to be consistent.
 */
bool __static_call_fixup(void *tramp, u8 op, void *dest)
{
        unsigned long addr = (unsigned long)tramp;

        /*
         * Not all .return_sites are a static_call trampoline (most are not).
         * Check if the 3 bytes after the return are still kernel text, if not,
         * then this definitely is not a trampoline and we need not worry
         * further.
         *
         * This avoids the memcmp() below tripping over pagefaults etc.
         */
        if (((addr >> PAGE_SHIFT) != ((addr + 7) >> PAGE_SHIFT)) &&
            !kernel_text_address(addr + 7))
                return false;

        if (memcmp(tramp+5, tramp_ud, 3)) {
                /* Not a trampoline site, not our problem. */
                return false;
        }

        mutex_lock(&text_mutex);
        if (op == RET_INSN_OPCODE || dest == &__x86_return_thunk)
                __static_call_transform(tramp, RET, NULL, true);
        mutex_unlock(&text_mutex);

        return true;
}
#endif