/* longjmp for PowerPC64.
   Copyright (C) 1995-2024 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <https://www.gnu.org/licenses/>.  */
18 | |
#include <sysdep.h>
#include <pointer_guard.h>
#include <stap-probe.h>
/* Tell the setjmp headers that we only want the jmp_buf layout macros
   (JB_* offsets), not the C prototypes.  */
#define _ASM
#define _SETJMP_H
#ifdef __NO_VMX__
# include <novmxsetjmp.h>
#else
# include <jmpbuf-offsets.h>
#endif

#ifndef __NO_VMX__
/* TOC entry through which we reach the hardware-capability word, so the
   VMX restore below can be skipped on CPUs without AltiVec.  Which symbol
   the entry points at depends on how this object is linked.  */
	.section ".toc","aw"
.LC__dl_hwcap:
# ifdef SHARED
#  if IS_IN (rtld)
/* Inside ld.so we use the local alias to avoid runtime GOT
   relocations.  */
	.tc _rtld_local_ro[TC],_rtld_local_ro
#  else
	.tc _rtld_global_ro[TC],_rtld_global_ro
#  endif
# else
	.tc _dl_hwcap[TC],_dl_hwcap
# endif
	.section ".text"
#endif
46 | |
	.machine "altivec"
/* void __longjmp (jmp_buf env /* r3 */, int val /* r4 */)
   Restore the machine state saved by __sigsetjmp and return VAL to the
   corresponding setjmp call site.  Restores r1, r2, LR, CR, the
   non-volatile GPRs r14-r31 and FPRs f14-f31, and — when the CPU has
   AltiVec — VRSAVE and the non-volatile vector registers v20-v31.  */
ENTRY (__longjmp)
	CALL_MCOUNT 2
#ifndef __NO_VMX__
	/* Fetch _dl_hwcap via the TOC entry defined above so we only touch
	   the vector unit when the hardware actually has it.  */
	addis	r5,r2,.LC__dl_hwcap@toc@ha
	ld	r5,.LC__dl_hwcap@toc@l(r5)
# ifdef SHARED
	/* Load _rtld_global_ro._dl_hwcap.  */
	ld	r5,RTLD_GLOBAL_RO_DL_HWCAP_OFFSET(r5)
# else
	/* Load extern _dl_hwcap.  */
	ld	r5,0(r5)
# endif
	andis.	r5,r5,(PPC_FEATURE_HAS_ALTIVEC >> 16)
	beq	L(no_vmx)
	/* r5 = address of the saved vector registers in the jmp_buf.  */
	la	r5,((JB_VRS)*8)(3)
	andi.	r6,r5,0xf		/* Is the save area 16-byte aligned?  */
	lwz	r0,((JB_VRSAVE)*8)(3)	/* 32-bit VRSAVE.  */
	mtspr	VRSAVE,r0
	beq+	L(aligned_restore_vmx)
	/* Misaligned save area: lvx only loads aligned quadwords, so build
	   each vector with a pair of overlapping loads merged by vperm,
	   using the permute mask lvsl derives from the misalignment.
	   r5 and r6 leapfrog each other 32 bytes at a time, and each step
	   reuses the high quadword already loaded for the previous
	   register as the next register's low half.  */
	addi	r6,r5,16
	lvsl	v0,0,r5			/* Permute control for the offset.  */
	lvx	v1,0,r5
	addi	r5,r5,32
	lvx	v21,0,r6
	vperm	v20,v1,v21,v0
# define load_misaligned_vmx_lo_loaded(loadvr,lovr,shiftvr,loadgpr,addgpr) \
	addi	addgpr,addgpr,32; \
	lvx	lovr,0,loadgpr; \
	vperm	loadvr,loadvr,lovr,shiftvr;
	/* Restore v21 through v30; LOADVR already holds its low quadword
	   from the previous step.  */
	load_misaligned_vmx_lo_loaded(v21,v22,v0,r5,r6)
	load_misaligned_vmx_lo_loaded(v22,v23,v0,r6,r5)
	load_misaligned_vmx_lo_loaded(v23,v24,v0,r5,r6)
	load_misaligned_vmx_lo_loaded(v24,v25,v0,r6,r5)
	load_misaligned_vmx_lo_loaded(v25,v26,v0,r5,r6)
	load_misaligned_vmx_lo_loaded(v26,v27,v0,r6,r5)
	load_misaligned_vmx_lo_loaded(v27,v28,v0,r5,r6)
	load_misaligned_vmx_lo_loaded(v28,v29,v0,r6,r5)
	load_misaligned_vmx_lo_loaded(v29,v30,v0,r5,r6)
	load_misaligned_vmx_lo_loaded(v30,v31,v0,r6,r5)
	/* Final high quadword completes v31.  */
	lvx	v1,0,r5
	vperm	v31,v31,v1,v0
	b	L(no_vmx)
L(aligned_restore_vmx):
	/* Aligned save area: straight lvx loads, with r5/r6 interleaved
	   32 bytes apart to keep two load streams in flight.  */
	addi	r6,r5,16
	lvx	v20,0,r5
	addi	r5,r5,32
	lvx	v21,0,r6
	addi	r6,r6,32
	lvx	v22,0,r5
	addi	r5,r5,32
	lvx	v23,0,r6
	addi	r6,r6,32
	lvx	v24,0,r5
	addi	r5,r5,32
	lvx	v25,0,r6
	addi	r6,r6,32
	lvx	v26,0,r5
	addi	r5,r5,32
	lvx	v27,0,r6
	addi	r6,r6,32
	lvx	v28,0,r5
	addi	r5,r5,32
	lvx	v29,0,r6
	addi	r6,r6,32
	lvx	v30,0,r5
	lvx	v31,0,r6
L(no_vmx):
#endif
#if defined PTR_DEMANGLE || defined CHECK_SP
	/* The saved stack pointer may be mangled and/or need validation:
	   keep it in r22 (scratch here; its real value is restored later
	   from JB_GPRS+8) until it is safe to install in r1.  */
	ld	r22,(JB_GPR1*8)(r3)
#else
	ld	r1,(JB_GPR1*8)(r3)
#endif
#ifdef PTR_DEMANGLE
# ifdef CHECK_SP
	PTR_DEMANGLE3 (r22, r22, r25)
# else
	PTR_DEMANGLE3 (r1, r22, r25)
# endif
#endif
#ifdef CHECK_SP
	CHECK_SP (r22)
	mr	r1,r22
#endif
	ld	r2,(JB_GPR2*8)(r3)	/* TOC pointer.  */
	ld	r0,(JB_LR*8)(r3)	/* Saved (possibly mangled) LR.  */
	/* Restore the non-volatile GPRs r14-r31 and FPRs f14-f31,
	   interleaved to overlap the integer and FP load streams.  */
	ld	r14,((JB_GPRS+0)*8)(r3)
	lfd	fp14,((JB_FPRS+0)*8)(r3)
	ld	r15,((JB_GPRS+1)*8)(r3)
	lfd	fp15,((JB_FPRS+1)*8)(r3)
	ld	r16,((JB_GPRS+2)*8)(r3)
	lfd	fp16,((JB_FPRS+2)*8)(r3)
	ld	r17,((JB_GPRS+3)*8)(r3)
	lfd	fp17,((JB_FPRS+3)*8)(r3)
	ld	r18,((JB_GPRS+4)*8)(r3)
	lfd	fp18,((JB_FPRS+4)*8)(r3)
	ld	r19,((JB_GPRS+5)*8)(r3)
	lfd	fp19,((JB_FPRS+5)*8)(r3)
	ld	r20,((JB_GPRS+6)*8)(r3)
	lfd	fp20,((JB_FPRS+6)*8)(r3)
#ifdef PTR_DEMANGLE
	PTR_DEMANGLE2 (r0, r25)		/* Demangle the return address.  */
#endif
	/* longjmp/longjmp_target probe expects longjmp first argument (8@3),
	   second argument (-4@4), and target address (8@0), respectively.  */
	LIBC_PROBE (longjmp, 3, 8@3, -4@4, 8@0)
	mtlr	r0
	std	r2,FRAME_TOC_SAVE(r1)	/* Restore the TOC save area.  */
	ld	r21,((JB_GPRS+7)*8)(r3)
	lfd	fp21,((JB_FPRS+7)*8)(r3)
	ld	r22,((JB_GPRS+8)*8)(r3)
	lfd	fp22,((JB_FPRS+8)*8)(r3)
	lwz	r5,((JB_CR*8)+4)(r3)	/* 32-bit CR.  */
	ld	r23,((JB_GPRS+9)*8)(r3)
	lfd	fp23,((JB_FPRS+9)*8)(r3)
	ld	r24,((JB_GPRS+10)*8)(r3)
	lfd	fp24,((JB_FPRS+10)*8)(r3)
	ld	r25,((JB_GPRS+11)*8)(r3)
	lfd	fp25,((JB_FPRS+11)*8)(r3)
	mtcrf	0xFF,r5			/* Restore all CR fields.  */
	ld	r26,((JB_GPRS+12)*8)(r3)
	lfd	fp26,((JB_FPRS+12)*8)(r3)
	ld	r27,((JB_GPRS+13)*8)(r3)
	lfd	fp27,((JB_FPRS+13)*8)(r3)
	ld	r28,((JB_GPRS+14)*8)(r3)
	lfd	fp28,((JB_FPRS+14)*8)(r3)
	ld	r29,((JB_GPRS+15)*8)(r3)
	lfd	fp29,((JB_FPRS+15)*8)(r3)
	ld	r30,((JB_GPRS+16)*8)(r3)
	lfd	fp30,((JB_FPRS+16)*8)(r3)
	ld	r31,((JB_GPRS+17)*8)(r3)
	lfd	fp31,((JB_FPRS+17)*8)(r3)
	LIBC_PROBE (longjmp_target, 3, 8@3, -4@4, 8@0)
	mr	r3,r4			/* Return VAL from the setjmp site.  */
	blr
END (__longjmp)
184 | |