/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  linux/arch/arm/vfp/vfphw.S
 *
 *  Copyright (C) 2004 ARM Limited.
 *  Written by Deep Blue Solutions Limited.
 */
#include <linux/init.h>
#include <linux/linkage.h>
#include <asm/thread_info.h>
#include <asm/vfpmacros.h>
#include <linux/kern_levels.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>

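	@ DBGSTR1 prints a one-argument debug message via _printk when this
	@ file is built with DEBUG defined. It saves and restores the
	@ caller-clobbered registers so it can sit in any code path; the
	@ format string is emitted into .rodata and NUL-terminated by the
	@ explicit .byte 0.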
	.macro  DBGSTR1, str, arg
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r1, \arg
	ldr	r0, =1f
	bl	_printk
	ldmfd	sp!, {r0-r3, ip, lr}

	.pushsection .rodata, "a"
1:	.ascii	KERN_DEBUG "VFP: \str\n"
	.byte	0
	.previous
#endif
	.endm

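	@ vfp_load_state and vfp_save_state below work on a memory image laid
	@ out as the working registers (d0-d15, plus d16-d31 where present)
	@ followed by FPEXC, FPSCR, FPINST and FPINST2 as four consecutive
	@ words; see struct vfp_hard_struct in <asm/fpstate.h>. The
	@ VFPFLDMIA/VFPFSTMIA macros post-increment r0 past the register
	@ bank, so the ldmia/stmia of the control words picks up right
	@ after it.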
ENTRY(vfp_load_state)
	@ Load the current VFP state
	@ r0 - load location
	@ returns FPEXC
	DBGSTR1	"load VFP state %p", r0
					@ Load the saved state back into the VFP
	VFPFLDMIA r0, r1		@ reload the working registers while
					@ FPEXC is in a safe state
	ldmia	r0, {r0-r3}		@ load FPEXC, FPSCR, FPINST, FPINST2
	tst	r0, #FPEXC_EX		@ is there additional state to restore?
	beq	1f
	VFPFMXR	FPINST, r2		@ restore FPINST (only if FPEXC.EX is set)
	tst	r0, #FPEXC_FP2V		@ is there an FPINST2 to write?
	beq	1f
	VFPFMXR	FPINST2, r3		@ FPINST2 if needed (and present)
1:
	VFPFMXR	FPSCR, r1		@ restore status
	ret	lr
ENDPROC(vfp_load_state)

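	@ The caller (e.g. vfp_sync_hwstate() in vfpmodule.c) has already
	@ read FPEXC and passes it in r1, so the EX/FP2V tests below need no
	@ extra VFPFMRX and the same value is stored into the image.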
ENTRY(vfp_save_state)
	@ Save the current VFP state
	@ r0 - save location
	@ r1 - FPEXC
	DBGSTR1	"save VFP state %p", r0
	VFPFSTMIA r0, r2		@ save the working registers
	VFPFMRX	r2, FPSCR		@ current status
	tst	r1, #FPEXC_EX		@ is there additional state to save?
	beq	1f
	VFPFMRX	r3, FPINST		@ FPINST (only if FPEXC.EX is set)
	tst	r1, #FPEXC_FP2V		@ is there an FPINST2 to read?
	beq	1f
	VFPFMRX	r12, FPINST2		@ FPINST2 if needed (and present)
1:
	stmia	r0, {r1, r2, r3, r12}	@ save FPEXC, FPSCR, FPINST, FPINST2
	ret	lr
ENDPROC(vfp_save_state)

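	@ tbl_branch dispatches into a table of fixed-size entries that
	@ immediately follows the macro: entry \base lives at
	@ 1f + (\base << \shift). In ARM mode this is a single add to pc,
	@ because reading pc yields the address of the current instruction
	@ plus 8, i.e. exactly label 1 below; Thumb-2 computes the target
	@ with adr/add instead.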
	.macro	tbl_branch, base, tmp, shift
#ifdef CONFIG_THUMB2_KERNEL
	adr	\tmp, 1f
	add	\tmp, \tmp, \base, lsl \shift
	ret	\tmp
#else
	add	pc, pc, \base, lsl \shift
	mov	r0, r0			@ nop: fills the slot between the add
					@ and the table (pc reads as . + 8)
#endif
1:
	.endm

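	@ The accessors below move single- and double-precision VFP registers
	@ to and from ARM registers for the emulation code. Each table entry
	@ is padded to exactly 8 bytes with ".org 1b + 8", which doubles as
	@ an assembly-time check: an entry growing past 8 bytes would make
	@ .org move backwards and fail to assemble.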
ENTRY(vfp_get_float)
	tbl_branch r0, r3, #3
	.fpu	vfpv2
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:	vmov	r0, s\dr
	ret	lr
	.org	1b + 8
	.endr
	.irp	dr,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31
1:	vmov	r0, s\dr
	ret	lr
	.org	1b + 8
	.endr
ENDPROC(vfp_get_float)

ENTRY(vfp_put_float)
	tbl_branch r1, r3, #3
	.fpu	vfpv2
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:	vmov	s\dr, r0
	ret	lr
	.org	1b + 8
	.endr
	.irp	dr,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31
1:	vmov	s\dr, r0
	ret	lr
	.org	1b + 8
	.endr
ENDPROC(vfp_put_float)

ENTRY(vfp_get_double)
	tbl_branch r0, r3, #3
	.fpu	vfpv2
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:	vmov	r0, r1, d\dr
	ret	lr
	.org	1b + 8
	.endr
#ifdef CONFIG_VFPv3
	@ d16 - d31 registers
	.fpu	vfpv3
	.irp	dr,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31
1:	vmov	r0, r1, d\dr
	ret	lr
	.org	1b + 8
	.endr
#endif

	@ virtual register 16 (or 32 if VFPv3) for compare with zero
	mov	r0, #0
	mov	r1, #0
	ret	lr
ENDPROC(vfp_get_double)

ENTRY(vfp_put_double)
	tbl_branch r2, r3, #3
	.fpu	vfpv2
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:	vmov	d\dr, r0, r1
	ret	lr
	.org	1b + 8
	.endr
#ifdef CONFIG_VFPv3
	.fpu	vfpv3
	@ d16 - d31 registers
	.irp	dr,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31
1:	vmov	d\dr, r0, r1
	ret	lr
	.org	1b + 8
	.endr
#endif
ENDPROC(vfp_put_double)
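
	@ As seen from C these entry points behave like the declarations in
	@ arch/arm/vfp/vfp.h:
	@
	@	u32  vfp_get_float(unsigned int reg);
	@	void vfp_put_float(u32 val, unsigned int reg);
	@	u64  vfp_get_double(unsigned int reg);
	@	void vfp_put_double(u64 val, unsigned int reg);
	@
	@ The AAPCS argument assignments match the registers used above: the
	@ value (if any) arrives in r0, or the r0/r1 pair for a double, and
	@ the register index lands in the register handed to tbl_branch.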