/* GNU Linux-libre 4.19.245-gnu1 — arch/powerpc/kvm/book3s_64_slb.S */
1 /*
2  * This program is free software; you can redistribute it and/or modify
3  * it under the terms of the GNU General Public License, version 2, as
4  * published by the Free Software Foundation.
5  *
6  * This program is distributed in the hope that it will be useful,
7  * but WITHOUT ANY WARRANTY; without even the implied warranty of
8  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
9  * GNU General Public License for more details.
10  *
11  * You should have received a copy of the GNU General Public License
12  * along with this program; if not, write to the Free Software
13  * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
14  *
15  * Copyright SUSE Linux Products GmbH 2009
16  *
17  * Authors: Alexander Graf <agraf@suse.de>
18  */
19
20 #include <asm/asm-compat.h>
21 #include <asm/feature-fixups.h>
22
23 /* Each shadow SLB entry is 16 bytes: ESID word at +0, VSID word at +8. */
24 #define SHADOW_SLB_ENTRY_LEN    0x10
25 /* Byte offsets of the ESID/VSID words of shadow SLB entry x.
26  * The macro argument is parenthesized so expression arguments
27  * (e.g. OFFSET_ESID(i + 1)) expand correctly.
28  */
29 #define OFFSET_ESID(x)          (SHADOW_SLB_ENTRY_LEN * (x))
30 #define OFFSET_VSID(x)          ((SHADOW_SLB_ENTRY_LEN * (x)) + 8)
26
27 /******************************************************************************
28  *                                                                            *
29  *                               Entry code                                   *
30  *                                                                            *
31  *****************************************************************************/
32
33 .macro LOAD_GUEST_SEGMENTS
34
35         /* Replace the host SLB contents with the guest's shadow SLB.
36          * Runs with translation off, so it is safe to tear down the
37          * SLB entries that currently map the code we are executing.
38          *
39          * Required state:
40          *
41          * MSR = ~IR|DR
42          * R13 = PACA
43          * R1 = host R1
44          * R2 = host R2
45          * R3 = shadow vcpu
46          * all other volatile GPRS = free except R4, R6
47          * SVCPU[CR]  = guest CR
48          * SVCPU[XER] = guest XER
49          * SVCPU[CTR] = guest CTR
50          * SVCPU[LR]  = guest LR
51          */
52
53 BEGIN_FW_FTR_SECTION
54
55         /* Declare SLB shadow as 0 entries big, so the hypervisor
56          * does not re-install the host's bolted entries behind our
57          * back while the guest mappings are active.
58          * Byte 3 is the least-significant byte of the big-endian
59          * 32-bit entry count in the shadow buffer — NOTE(review):
60          * assumes the count always fits in one byte.
61          */
62
63         ld      r11, PACA_SLBSHADOWPTR(r13)
64         li      r8, 0
65         stb     r8, 3(r11)
66
67 END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)
68
69         /* Flush SLB: slbia does not touch SLB entry 0, so clear it
70          * explicitly first with an all-zero (invalid) slbmte. */
71
72         li      r10, 0
73         slbmte  r10, r10
74         slbia
75
76         /* Fill SLB with our shadow */
77
78         /* r12 = one past the last entry: &svcpu->slb[slb_max]
79          * (entries are 16 bytes each: ESID at +0, VSID at +8) */
80         lbz     r12, SVCPU_SLB_MAX(r3)
81         mulli   r12, r12, 16
82         addi    r12, r12, SVCPU_SLB
83         add     r12, r12, r3
84
85         /* for (r11 = kvm_slb; r11 < kvm_slb + kvm_slb_size; r11+=slb_entry) */
86         li      r11, SVCPU_SLB
87         add     r11, r11, r3
88
89 slb_loop_enter:
90
91         ld      r10, 0(r11)             /* r10 = entry's ESID word */
92
93         /* Install only entries with the valid bit set; the do-while
94          * shape means slot 0 is examined even when slb_max == 0, but
95          * this check keeps an invalid slot from being installed. */
96         andis.  r9, r10, SLB_ESID_V@h
97         beq     slb_loop_enter_skip
98
99         ld      r9, 8(r11)              /* r9 = entry's VSID word */
100         slbmte  r9, r10                 /* install: RS = VSID data, RB = ESID */
101
102 slb_loop_enter_skip:
103         addi    r11, r11, 16            /* next 16-byte shadow entry */
104         cmpd    cr0, r11, r12
105         blt     slb_loop_enter
106
107 slb_do_enter:
108
109 .endm
94
95 /******************************************************************************
96  *                                                                            *
97  *                               Exit code                                    *
98  *                                                                            *
99  *****************************************************************************/
100
101 .macro LOAD_HOST_SEGMENTS
102
103         /* Tear down the guest SLB and restore the host's bolted
104          * entries from the SLB shadow buffer on guest exit.
105          *
106          * Register usage at this point:
107          *
108          * R1         = host R1
109          * R2         = host R2
110          * R12        = exit handler id
111          * R13        = shadow vcpu - SHADOW_VCPU_OFF [=PACA on PPC64]
112          * SVCPU.*    = guest *
113          * SVCPU[CR]  = guest CR
114          * SVCPU[XER] = guest XER
115          * SVCPU[CTR] = guest CTR
116          * SVCPU[LR]  = guest LR
117          *
118          */
119
120         /* Remove all SLB entries that are in use.
121          * slbia does not invalidate SLB entry 0, so clear it first
122          * with an all-zero (invalid) slbmte. */
123
124         li      r0, 0
125         slbmte  r0, r0
126         slbia
127
128         /* Restore bolted entries from the shadow */
129
130         ld      r11, PACA_SLBSHADOWPTR(r13)
131
132 BEGIN_FW_FTR_SECTION
133
134         /* Declare SLB shadow as SLB_NUM_BOLTED entries big again so
135          * the hypervisor resumes maintaining the bolted entries.
136          * Byte 3 is the least-significant byte of the big-endian
137          * 32-bit entry count — NOTE(review): assumes it fits one byte. */
138
139         li      r8, SLB_NUM_BOLTED
140         stb     r8, 3(r11)
141
142 END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)
143
144         /* Manually load all entries from shadow SLB */
145
146         /* r8/r7 index the ESID/VSID words of the current save-area
147          * entry, relative to the shadow buffer base in r11. */
148         li      r8, SLBSHADOW_SAVEAREA
149         li      r7, SLBSHADOW_SAVEAREA + 8
150
151         /* Unrolled loop over the bolted slots.  LDX_BE performs a
152          * big-endian indexed load regardless of host endianness —
153          * presumably because the shadow save area is defined as
154          * big-endian by the platform spec (PAPR); verify. */
155         .rept   SLB_NUM_BOLTED
156         LDX_BE  r10, r11, r8
157         cmpdi   r10, 0                  /* all-zero ESID word => unused slot */
158         beq     1f
159         LDX_BE  r9, r11, r7
160         slbmte  r9, r10                 /* install: RS = VSID data, RB = ESID */
161 1:      addi    r7, r7, SHADOW_SLB_ENTRY_LEN
162         addi    r8, r8, SHADOW_SLB_ENTRY_LEN
163         .endr
164
165         /* Make sure the SLB updates are complete before continuing */
166         isync
167         sync
168
169 slb_do_exit:
170
171 .endm