// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 */

#include "asm/asm_pointer_auth_context.h"

#include <asm/asm-offsets.h>
#include <asm/sysreg.h>
#include <linux/linkage.h>

#ifdef CONFIG_COMPAT
	/*
	 * Obtain the regs of a compat (AArch32) task to sign or authenticate.
	 * In:  x0 = struct pt_regs *
	 * Out: x1 = 0, x2 = 0 (compat tasks have no x16/x17 to protect),
	 *      x3 = compat lr, x4 = compat sp, x5 = pc, x6 = pstate.
	 * These outputs feed the sign/auth *_common macros from
	 * asm_pointer_auth_context.h.
	 */
	.macro ldr_compat_pt_regs
	mov	x1, #0
	mov	x2, #0
	/* load lr, sp, pc, pstate of compat task */
	ldr	x3, [x0, #S_COMPAT_LR]
	ldr	x4, [x0, #S_COMPAT_SP]
	ldr	x5, [x0, #S_PC]
	ldr	x6, [x0, #S_PSTATE]
	.endm
#endif

	/*
	 * Obtain the regs of a native (AArch64) task to sign or authenticate.
	 * In:  x0 = struct pt_regs *
	 * Out: x1 = x16, x2 = x17, x3 = lr, x4 = sp, x5 = pc, x6 = pstate.
	 */
	.macro ldr_pt_regs
	/* load x16, x17, lr, sp, pc, pstate of task */
	ldp	x1, x2, [x0, #S_X16]
	ldr	x3, [x0, #S_LR]
	ldr	x4, [x0, #S_SP]
	ldr	x5, [x0, #S_PC]
	ldr	x6, [x0, #S_PSTATE]
	.endm

/*
 * void sign_thread_context(struct cpu_context *cpu_context)
 * On entry:
 *   x0: the pointer of cpu_context
 * IRQs are masked (DAIF.I via daifset #0x2) around the sign so the
 * unsigned pc/sp cannot be observed/modified from an interrupt; the
 * caller's DAIF state is preserved in x9 and restored on exit.
 */
SYM_FUNC_START(sign_thread_context)
	mrs	x9, daif
	msr	daifset, #0x2			// mask IRQs
	ldr	x1, [x0, #CPU_CONTEXT_PC]
	ldr	x2, [x0, #CPU_CONTEXT_SP]
	sign_thread_context_common
	msr	daif, x9			// restore caller's DAIF
	ret
SYM_FUNC_END(sign_thread_context)

/*
 * void auth_thread_context(struct cpu_context *cpu_context)
 * On entry:
 *   x0: the pointer of cpu_context
 * Keeps a frame record because auth_thread_context_common may need a
 * valid x29/x30 pair (e.g. to report an authentication failure).
 */
SYM_FUNC_START(auth_thread_context)
	stp	x29, x30, [sp, #-16]!
	mov	x29, sp
	mrs	x9, daif
	msr	daifset, #0x2			// mask IRQs
	ldr	x1, [x0, #CPU_CONTEXT_PC]
	ldr	x2, [x0, #CPU_CONTEXT_SP]
	auth_thread_context_common
	msr	daif, x9			// restore caller's DAIF
	ldp	x29, x30, [sp], #16
	ret
SYM_FUNC_END(auth_thread_context)

/*
 * int set_exception_context_register_asm(struct pt_regs *regs, int offset, u64 val);
 * On entry:
 *   x0: the regs of task
 *   x1: the offset of member in pt_regs struct (must be one of
 *       S_X16, S_X17, S_LR, S_SP, S_PC, S_PSTATE)
 *   x2: the value to be updated
 * Returns 0 on success, -EINVAL (-22) for an unsupported offset.
 *
 * Flow: authenticate the current (signed) context, patch the requested
 * register both in memory and in the x1-x6 signing inputs, then re-sign.
 * IRQs are masked for the whole auth/patch/sign window.
 *
 * Register roles: x9 = offset, x10 = val, x11 = saved DAIF,
 * x12/x13 = copies of the loaded x16/x17 for the auth macro.
 */
SYM_FUNC_START(set_exception_context_register_asm)
	stp	x29, x30, [sp, #-16]!
	mov	x29, sp
	mov	x9, x1				// preserve offset
	mov	x10, x2				// preserve val
	mrs	x11, daif
	msr	daifset, #0x2			// mask IRQs
	ldr_pt_regs
	mov	x12, x1
	mov	x13, x2
	auth_exception_context_common x0, x12, x13
	/* Dispatch on offset; order of compares exploits S_X16 < S_X17 <
	 * S_LR < S_SP < S_PC < S_PSTATE in struct pt_regs. */
	cmp	x9, #S_LR
	b.eq	.Lupdate_lr
	b.ls	.Lchoose_lower			// offset < S_LR: x16/x17 range
	cmp	x9, #S_PC
	b.eq	.Lupdate_pc
	b.cc	.Lupdate_sp			// S_LR < offset < S_PC => S_SP
	cmp	x9, #S_PSTATE
	b.eq	.Lupdate_pstate
.Lerror_return:
	/* invalid value: return -EINVAL */
	mov	x0, #-22
	b	.Lreturn
.Lchoose_lower:
	cmp	x9, #S_X16
	b.eq	.Lupdate_x16
	b.hi	.Lupdate_x17			// S_X16 < offset <= S_X17
	b	.Lerror_return
.Lupdate_pstate:
	mov	x6, x10
.Lupdate_done:
	str	x10, [x0, x9]			// patch the member in pt_regs
	sign_exception_context_common
	/*
	 * Success return value must be set BEFORE the shared epilogue
	 * label, otherwise the error path's -EINVAL in x0 would be
	 * clobbered and the function could never report failure.
	 */
	mov	x0, #0
.Lreturn:
	msr	daif, x11			// restore caller's DAIF
	ldp	x29, x30, [sp], #16
	ret

.Lupdate_x16:
	mov	x1, x10
	b	.Lupdate_done
.Lupdate_x17:
	mov	x2, x10
	b	.Lupdate_done
.Lupdate_lr:
	mov	x3, x10
	b	.Lupdate_done
.Lupdate_sp:
	mov	x4, x10
	b	.Lupdate_done
.Lupdate_pc:
	mov	x5, x10
	b	.Lupdate_done
SYM_FUNC_END(set_exception_context_register_asm)

#ifdef CONFIG_COMPAT
/*
 * int set_compat_exception_context_register_asm(struct pt_regs *regs, int offset, u64 val);
 * On entry:
 *   x0: the regs of compat task
 *   x1: the offset of member in pt_regs struct (S_COMPAT_SP,
 *       S_COMPAT_LR, S_PC or S_PSTATE)
 *   x2: the value to be updated
 * Returns 0 on success, -EINVAL (-22) for an unsupported offset.
 * Same structure as the native variant above, restricted to the
 * registers that exist in a compat frame.
 */
SYM_FUNC_START(set_compat_exception_context_register_asm)
	stp	x29, x30, [sp, #-16]!
	mov	x29, sp
	mov	x9, x1				// preserve offset
	mov	x10, x2				// preserve val
	mrs	x11, daif
	msr	daifset, #0x2			// mask IRQs
	ldr_compat_pt_regs
	mov	x12, x1
	mov	x13, x2
	auth_exception_context_common x0, x12, x13
	/* S_COMPAT_SP < S_COMPAT_LR < S_PC < S_PSTATE */
	cmp	x9, #S_COMPAT_LR
	b.eq	.Lupdate_compat_lr
	b.ls	.Lcompat_choose_lower		// offset < S_COMPAT_LR
	cmp	x9, #S_PSTATE
	b.eq	.Lupdate_compat_pstate
	b.cc	.Lupdate_compat_pc		// S_COMPAT_LR < offset < S_PSTATE
.Lcompat_error_return:
	/* invalid value: return -EINVAL */
	mov	x0, #-22
	b	.Lcompat_return
.Lcompat_choose_lower:
	cmp	x9, #S_COMPAT_SP
	b.eq	.Lupdate_compat_sp
	b	.Lcompat_error_return
.Lupdate_compat_pstate:
	mov	x6, x10
.Lcompat_update_done:
	str	x10, [x0, x9]			// patch the member in pt_regs
	sign_exception_context_common
	/*
	 * As in the native variant: set the success value before the
	 * shared epilogue label so the -EINVAL path is not clobbered.
	 */
	mov	x0, #0
.Lcompat_return:
	msr	daif, x11			// restore caller's DAIF
	ldp	x29, x30, [sp], #16
	ret

.Lupdate_compat_lr:
	mov	x3, x10
	b	.Lcompat_update_done
.Lupdate_compat_sp:
	mov	x4, x10
	b	.Lcompat_update_done
.Lupdate_compat_pc:
	mov	x5, x10
	b	.Lcompat_update_done
SYM_FUNC_END(set_compat_exception_context_register_asm)
#endif

/*
 * void sign_exception_context_asm(struct pt_regs *regs);
 * On entry:
 *   x0: the regs of task
 * Leaf: the sign macro presumably does not need a frame record here
 * (mirrors the original, which built none).
 */
SYM_FUNC_START(sign_exception_context_asm)
	ldr_pt_regs
	sign_exception_context_common
	ret
SYM_FUNC_END(sign_exception_context_asm)

/*
 * void auth_exception_context_asm(struct pt_regs *regs);
 * On entry:
 *   x0: the regs of task
 */
SYM_FUNC_START(auth_exception_context_asm)
	stp	x29, x30, [sp, #-16]!
	mov	x29, sp
	ldr_pt_regs
	auth_exception_context_common
	ldp	x29, x30, [sp], #16
	ret
SYM_FUNC_END(auth_exception_context_asm)

#ifdef CONFIG_COMPAT
/*
 * void sign_compat_exception_context_asm(struct pt_regs *regs);
 * On entry:
 *   x0: the regs of compat task
 */
SYM_FUNC_START(sign_compat_exception_context_asm)
	ldr_compat_pt_regs
	sign_exception_context_common
	ret
SYM_FUNC_END(sign_compat_exception_context_asm)

/*
 * void auth_compat_exception_context_asm(struct pt_regs *regs);
 * On entry:
 *   x0: the regs of compat task
 */
SYM_FUNC_START(auth_compat_exception_context_asm)
	stp	x29, x30, [sp, #-16]!
	mov	x29, sp
	ldr_compat_pt_regs
	auth_exception_context_common
	ldp	x29, x30, [sp], #16
	ret
SYM_FUNC_END(auth_compat_exception_context_asm)
#endif