2 * Copyright (C) 2015 - ARM Ltd
3 * Author: Marc Zyngier <marc.zyngier@arm.com>
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License version 2 as
7 * published by the Free Software Foundation.
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
14 * You should have received a copy of the GNU General Public License
15 * along with this program. If not, see <http://www.gnu.org/licenses/>.
18 #include <linux/arm-smccc.h>
19 #include <linux/linkage.h>
21 #include <asm/alternative.h>
22 #include <asm/assembler.h>
23 #include <asm/cpufeature.h>
24 #include <asm/kvm_arm.h>
25 #include <asm/kvm_asm.h>
26 #include <asm/kvm_mmu.h>
// Push the AAPCS64 caller-saved GPRs (x0-x17) onto the current stack.
// Each stp pre-decrements sp by 16 bytes, so sp stays 16-byte aligned
// throughout (required whenever sp is used to access memory on AArch64).
// Used by the unexpected-EL2-exception paths below before calling into C.
// NOTE(review): no matching .endm is visible in this chunk, and x18 is
// not saved -- confirm against the full file that the macro is properly
// terminated and that skipping x18 (platform register) is intentional.
28 .macro save_caller_saved_regs_vect
29 stp x0, x1, [sp, #-16]!
30 stp x2, x3, [sp, #-16]!
31 stp x4, x5, [sp, #-16]!
32 stp x6, x7, [sp, #-16]!
33 stp x8, x9, [sp, #-16]!
34 stp x10, x11, [sp, #-16]!
35 stp x12, x13, [sp, #-16]!
36 stp x14, x15, [sp, #-16]!
37 stp x16, x17, [sp, #-16]!
// Pop the caller-saved GPRs in exact reverse order of
// save_caller_saved_regs_vect, each ldp post-incrementing sp by 16.
// NOTE(review): only the x16/x17 down to x10/x11 pops are visible here;
// the ldp pairs for x8..x0 and the closing .endm appear to be missing
// from this chunk -- verify against the full file.
40 .macro restore_caller_saved_regs_vect
41 ldp x16, x17, [sp], #16
42 ldp x14, x15, [sp], #16
43 ldp x12, x13, [sp], #16
44 ldp x10, x11, [sp], #16
// Everything below lives in the dedicated .hyp.text section ("ax" =
// allocatable + executable), which the KVM init code maps at EL2.
53 .pushsection .hyp.text, "ax"
// NOTE(review): the following '*' lines are orphaned fragments of block
// comments whose /* ... */ delimiters and the ENTRY(__vhe_hyp_call)
// label plus its body are missing from this chunk. Per the surviving
// text, the function shuffles args so the function pointer arrives in
// x0 with parameters in x1-x3, and issues an explicit isb on the VHE
// (E2H) path where no exception return provides an implicit one --
// confirm against the full file.
57 * Shuffle the parameters before calling the function
58 * pointed to in x0. Assumes parameters in x[1,2,3].
72 * We used to rely on having an exception return to get
73 * an implicit isb. In the E2H case, we don't have it anymore.
74 * rather than changing all the leaf functions, just do it here
75 * before returning to the rest of the kernel.
79 ENDPROC(__vhe_hyp_call)
// NOTE(review): orphaned block-comment fragments -- the enclosing
// /* ... */ delimiters are missing from this chunk.
82 * Compute the idmap address of __kvm_hyp_reset based on the idmap
83 * start passed as a parameter, and jump there.
85 * x0: HYP phys_idmap_start
87 ENTRY(__kvm_hyp_teardown)
// x3 = link-time address of __kvm_hyp_reset (adr_l is a PC-relative
// adrp/add pseudo-op, so this works wherever the code is mapped).
89 adr_l x3, __kvm_hyp_reset
91 /* insert __kvm_hyp_reset()s offset into phys_idmap_start */
// Splice the page offset of __kvm_hyp_reset (low PAGE_SHIFT bits of x3)
// into x4. NOTE(review): the instruction that seeds x4 (presumably a
// copy of x0 = phys_idmap_start) and the final branch to x4 are not
// visible in this chunk -- as shown, x4 would be used uninitialized;
// confirm the missing lines against the full file.
92 bfi x4, x3, #0, #PAGE_SHIFT
94 ENDPROC(__kvm_hyp_teardown)
// Synchronous exception taken to EL2 from EL1 (guest or, pre-init, the
// host). Decodes the exception class and dispatches. NOTE(review): many
// lines are missing from this chunk (e.g. the ESR read before the lsr,
// the branch consuming the ccmp result, the HVC_GET_VECTORS handling,
// and the el1_hvc_guest label) -- hedged comments below.
96 el1_sync: // Guest trapped into EL2
// Free up x0/x1 as scratch; sp stays 16-byte aligned.
97 stp x0, x1, [sp, #-16]!
// Extract the exception class (EC) field from what is presumably
// ESR_EL2 (the mrs that loads x0 is not visible here -- confirm).
100 lsr x0, x0, #ESR_ELx_EC_SHIFT
// EC == HVC64, or (via ccmp) EC == HVC32: flags end up "eq" if the
// trap was an HVC from either AArch64 or AArch32.
101 cmp x0, #ESR_ELx_EC_HVC64
102 ccmp x0, #ESR_ELx_EC_HVC32, #4, ne
// A non-zero VTTBR_EL2 means a guest is loaded, so the HVC came from
// the guest; otherwise it must be the host's hyp-call interface.
105 mrs x1, vttbr_el2 // If vttbr is valid, the guest
106 cbnz x1, el1_hvc_guest // called HVC
108 /* Here, we're pretty sure the host called HVC. */
109 ldp x0, x1, [sp], #16
// Host hyp-call dispatch on the immediate/function in x0.
111 cmp x0, #HVC_GET_VECTORS
// NOTE(review): orphaned comment fragment; its /* */ and surrounding
// code are missing from this chunk.
118 * Perform the EL2 call
// --- el1_hvc_guest fast paths (label not visible in this chunk) ---
127 * Fastest possible path for ARM_SMCCC_ARCH_WORKAROUND_1.
128 * The workaround has already been applied on the host,
129 * so let's quickly get back to the guest. We don't bother
130 * restoring x1, as it can be clobbered anyway.
// Peek at the guest's x0 (saved at [sp]) without popping it, and check
// it against the WA1 function ID; a zero result means a match.
132 ldr x1, [sp] // Guest's x0
133 eor w1, w1, #ARM_SMCCC_ARCH_WORKAROUND_1
136 /* ARM_SMCCC_ARCH_WORKAROUND_2 handling */
// Reuse the previous eor: xoring with (WA1 ^ WA2) turns "x0 ^ WA1"
// into "x0 ^ WA2", so a zero result now means the WA2 function ID.
// (No comment may sit between these two physical lines: the trailing
// backslash makes them one statement.)
137 eor w1, w1, #(ARM_SMCCC_ARCH_WORKAROUND_1 ^ \
138 ARM_SMCCC_ARCH_WORKAROUND_2)
141 #ifdef CONFIG_ARM64_SSBD
// Patchable callback: the WA2 handling below can be NOP-ed out at boot
// (alternative_cb body/branch lines are missing from this chunk).
142 alternative_cb arm64_enable_wa2_handling
// x2 presumably points at the vcpu here (the instruction loading it is
// not visible) -- fetch the per-vcpu workaround flags.
146 ldr x0, [x2, #VCPU_WORKAROUND_FLAGS]
148 // Sanitize the argument and update the guest flags
// Collapse the guest's x1 to a strict 0/1 without touching the flags:
// clz gives 32 only for w1 == 0; >>5 maps that to 1 (else 0); eor #1
// inverts, yielding w1 = !!(guest x1).
149 ldr x1, [sp, #8] // Guest's x1
150 clz w1, w1 // Murphy's device:
151 lsr w1, w1, #5 // w1 = !!w1 without using
152 eor w1, w1, #1 // the flags...
// Record the sanitized enable bit in the vcpu's workaround flags.
153 bfi x0, x1, #VCPU_WORKAROUND_2_FLAG_SHIFT, #1
154 str x0, [x2, #VCPU_WORKAROUND_FLAGS]
156 /* Check that we actually need to perform the call */
// Per-CPU flag: only issue the firmware SMC if this CPU needs the
// SSBD mitigation toggled (the conditional branch consuming x0 is
// missing from this chunk).
157 hyp_ldr_this_cpu x0, arm64_ssbd_callback_required, x2
// Forward the request to EL3 (the smc instruction itself is missing
// from this chunk -- confirm).
160 mov w0, #ARM_SMCCC_ARCH_WORKAROUND_2
163 /* Don't leak data from the SMC call */
// --- el1_trap / el1_irq / el1_error fragments ---
// NOTE(review): the labels themselves, the ESR load feeding the lsr,
// and the exit branches (presumably to __guest_exit) are all missing
// from this chunk; the pieces below are annotated on best effort.
// Extract the exception class again for trap routing.
179 lsr x0, x0, #ESR_ELx_EC_SHIFT
185 /* Guest accessed VFP/SIMD registers, save host, restore Guest */
// FP/ASIMD traps take a dedicated lazy-switch path and return straight
// to the guest without a full exit.
186 cmp x0, #ESR_ELx_EC_FP_ASIMD
187 b.eq __fpsimd_guest_restore
// Everything else becomes a generic trap exit code for the C world.
189 mov x0, #ARM_EXCEPTION_TRAP
// Presumably the start of el1_irq (label missing): stash x0/x1 and
// report an IRQ exit.
193 stp x0, x1, [sp, #-16]!
195 mov x0, #ARM_EXCEPTION_IRQ
// Presumably the start of el1_error (label missing): stash x0/x1 and
// report an SError exit.
199 stp x0, x1, [sp, #-16]!
201 mov x0, #ARM_EXCEPTION_EL1_SERROR
// Unexpected exception taken at EL2 (handler label, e.g. el2_sync, is
// missing from this chunk). Save all caller-saved GPRs plus the frame
// pair x29/x30 (bl clobbers x30), hand off to the C handler, then
// restore everything. The eret that should follow is not visible here.
205 save_caller_saved_regs_vect
206 stp x29, x30, [sp, #-16]!
207 bl kvm_unexpected_el2_exception
208 ldp x29, x30, [sp], #16
209 restore_caller_saved_regs_vect
// Second unexpected-EL2-exception handler body (label, e.g. el2_error,
// missing from this chunk). Same shape as the handler above: preserve
// caller-saved regs and x29/x30 around the call into C, restore, and
// (presumably, not visible here) eret back.
214 save_caller_saved_regs_vect
215 stp x29, x30, [sp, #-16]!
217 bl kvm_unexpected_el2_exception
219 ldp x29, x30, [sp], #16
220 restore_caller_saved_regs_vect
// Hyp panic trampoline: builds a PSTATE value with all DAIF exceptions
// masked for the exception return into the host's panic path.
// NOTE(review): the body is gutted in this chunk -- the mov below ends
// in a line-continuation whose following line(s) (the EL/SP mode bits,
// the msr spsr_el2/elr_el2 writes, and the eret) are missing. No
// comment is inserted after the mov because the trailing backslash
// splices the next physical line into the same statement.
224 ENTRY(__hyp_do_panic)
225 mov lr, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT |\
231 ENDPROC(__hyp_do_panic)
// Generate a stub handler that diverts to \target (default __hyp_panic)
// for vector slots that must never fire. NOTE(review): the macro body
// (presumably an alignment directive, the \label definition, a branch
// to \target, and .endm) is missing from this chunk.
238 .macro invalid_vector label, target = __hyp_panic
245 /* None of these should ever happen */
// One stub per impossible vector slot: the EL2t slots (KVM's hyp code
// runs with SP_EL2h), EL2h IRQ/FIQ, and the unreachable EL1 slots.
246 invalid_vector el2t_sync_invalid
247 invalid_vector el2t_irq_invalid
248 invalid_vector el2t_fiq_invalid
249 invalid_vector el2t_error_invalid
250 invalid_vector el2h_irq_invalid
251 invalid_vector el2h_fiq_invalid
252 invalid_vector el1_sync_invalid
253 invalid_vector el1_irq_invalid
254 invalid_vector el1_fiq_invalid
// The EL2 exception vector table installed in VBAR_EL2. Architecturally
// it is four groups of four 0x80-byte slots: taken-from-EL2-with-SP_EL0
// (EL2t), taken-from-EL2-with-SP_EL2 (EL2h), from lower EL in AArch64,
// and from lower EL in AArch32; each group orders sync/IRQ/FIQ/SError.
// NOTE(review): the .align 11 that VBAR_EL2 requires before the table
// is not visible in this chunk -- presumably ventry handles per-slot
// alignment; confirm the table base alignment against the full file.
260 ENTRY(__kvm_hyp_vector)
261 ventry el2t_sync_invalid // Synchronous EL2t
262 ventry el2t_irq_invalid // IRQ EL2t
263 ventry el2t_fiq_invalid // FIQ EL2t
264 ventry el2t_error_invalid // Error EL2t
// EL2h group: sync and SError get real handlers; IRQ/FIQ cannot occur.
266 ventry el2_sync // Synchronous EL2h
267 ventry el2h_irq_invalid // IRQ EL2h
268 ventry el2h_fiq_invalid // FIQ EL2h
269 ventry el2_error // Error EL2h
// Lower-EL AArch64 group: the guest (or pre-init host) trapping in.
271 ventry el1_sync // Synchronous 64-bit EL1
272 ventry el1_irq // IRQ 64-bit EL1
273 ventry el1_fiq_invalid // FIQ 64-bit EL1
274 ventry el1_error // Error 64-bit EL1
// Lower-EL AArch32 group: same handlers as the AArch64 group.
276 ventry el1_sync // Synchronous 32-bit EL1
277 ventry el1_irq // IRQ 32-bit EL1
278 ventry el1_fiq_invalid // FIQ 32-bit EL1
279 ventry el1_error // Error 32-bit EL1
280 ENDPROC(__kvm_hyp_vector)