/* Function cbrtf vectorized with SSE4.
   Copyright (C) 2021-2024 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   https://www.gnu.org/licenses/.  */

/*
 * ALGORITHM DESCRIPTION:
 *
 *     x = 2^{3*k+j} * 1.b1 b2 ... b5 b6 ... b23
 *     Let r = (x * 2^{-3k-j} - 1.b1 b2 ... b5 1) * rcp[b1 b2 .. b5],
 *     where rcp[b1 b2 .. b5] = 1/(1.b1 b2 b3 b4 b5 1) in single precision
 *     cbrtf(2^j * 1.b1 b2 .. b5 1) is approximated as T[j][b1..b5] + D[j][b1..b5]
 *     (T stores the high 24 bits, D stores the low order bits)
 *     Result = 2^k*T + (2^k*T*r)*P + 2^k*D
 *     where P = p1 + p2*r + ...
 *
 */
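
/*
 * A rough scalar model of the scheme above (an illustrative sketch only:
 * the tables are rebuilt here with libm's cbrtf instead of the precomputed
 * constants in __svml_scbrt_data_internal, the negated encoding used by
 * _sRcp/_sExpMask is dropped, and zero/denormal/Inf/NaN inputs are not
 * handled -- in the vector code those lanes take the scalar callout):
 *
 *   #include <math.h>
 *   #include <stdint.h>
 *   #include <string.h>
 *
 *   static float
 *   cbrtf_model (float x)  // hypothetical reference, not a glibc interface
 *   {
 *     uint32_t u;
 *     memcpy (&u, &x, sizeof (u));
 *     int e = (int) ((u >> 23) & 0xff) - 127;      // unbiased exponent
 *     int k = e >= 0 ? e / 3 : -((2 - e) / 3);     // floor (e / 3)
 *     int j = e - 3 * k;                           // e = 3*k + j, j in {0,1,2}
 *     int i = (u >> 18) & 0x1f;                    // top 5 mantissa bits b1..b5
 *     float m = 1.0f + (float) (u & 0x7fffff) * 0x1p-23f;
 *     float yp = 1.0f + i / 32.0f + 1.0f / 64.0f;  // 1.b1 b2 .. b5 1
 *     float r = (m - yp) / yp;                     // (x*2^{-3k-j} - y`) * rcp
 *     float t = cbrtf (ldexpf (yp, j));            // T[j][b1..b5]
 *     const float p1 = 0x1.555922p-2f;             // _sP1, ~ 1/3
 *     const float p2 = -0x1.c752c4p-4f;            // _sP2, ~ -1/9
 *     float res = ldexpf (t + t * r * (p1 + p2 * r), k);
 *     return x < 0.0f ? -res : res;                // cbrtf is odd
 *   }
 */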

/* Offsets for data table __svml_scbrt_data_internal
 */
#define _sRcp			0
#define _sCbrtHL		128
#define _sP2			512
#define _sP1			528
#define _sMantissaMask		544
#define _sMantissaMask1		560
#define _sExpMask		576
#define _sExpMask1		592
#define _iRcpIndexMask		608
#define _iBExpMask		624
#define _iSignMask		640
#define _iBias			656
#define _iOne			672
#define _i555			688
#define _iAbsMask		704
#define _iSubConst		720
#define _iCmpConst		736

#include <sysdep.h>

	.section .text.sse4, "ax", @progbits
ENTRY(_ZGVbN4v_cbrtf_sse4)
	subq $72, %rsp
	cfi_def_cfa_offset(80)

	/*
	 * Load constants
	 * Reciprocal index calculation
	 */
	movaps %xmm0, %xmm2
	movdqu _iRcpIndexMask+__svml_scbrt_data_internal(%rip), %xmm3
	psrld $16, %xmm2
	pand %xmm2, %xmm3

	/* Load reciprocal value */
	lea __svml_scbrt_data_internal(%rip), %rdx
	pshufd $1, %xmm3, %xmm5

	/* Get signed biased exponent */
	psrld $7, %xmm2
	movd %xmm3, %eax
	movd %xmm5, %ecx
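
	/*
	 * In scalar terms the index above is a byte offset into _sRcp
	 * (a sketch; u is the bit pattern of the input float):
	 *
	 *   unsigned idx = (u >> 16) & 0x7c;  // _iRcpIndexMask; equals 4 * (b1..b5)
	 *   float rcp = sRcp[idx / 4];        // hypothetical C view of _sRcp
	 *                                     // == -1/(1.b1 b2 .. b5 1)
	 */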

	/* Get absolute biased exponent */
	movdqu _iBExpMask+__svml_scbrt_data_internal(%rip), %xmm15

	/*
	 * Calculate exponent/3
	 * i555Exp=(2^{12}-1)/3*exponent
	 */
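
	/*
	 * One way to check why the 0x555 constant works (an illustrative
	 * sketch; e is the 8-bit biased exponent isolated above):
	 *
	 *   for (int e = 1; e < 256; e++)
	 *     assert (((e * 0x555) >> 12) == (e - 1) / 3);   // needs <assert.h>
	 *
	 * so the shift by 12 below yields k + (bias-1)/3 = k + 42 for
	 * exponent = 3*k + j + bias.
	 */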
	movdqu _i555+__svml_scbrt_data_internal(%rip), %xmm14
	pand %xmm2, %xmm15
	movslq %eax, %rax
	movdqa %xmm14, %xmm5
	movslq %ecx, %rcx
	psrlq $32, %xmm14
	pmuludq %xmm15, %xmm5
	movd (%rdx, %rax), %xmm4
	movd (%rdx, %rcx), %xmm6
	punpckldq %xmm6, %xmm4
	movdqa %xmm15, %xmm6
	psrlq $32, %xmm15
	pmuludq %xmm14, %xmm15
	pshufd $2, %xmm3, %xmm7
	psllq $32, %xmm15
	pshufd $3, %xmm3, %xmm8
	movd %xmm7, %esi
	movd %xmm8, %edi

	/* Argument reduction */
	movups _sMantissaMask+__svml_scbrt_data_internal(%rip), %xmm12
	movups _sMantissaMask1+__svml_scbrt_data_internal(%rip), %xmm11
	andps %xmm0, %xmm12
	pand .FLT_17(%rip), %xmm5
	andps %xmm0, %xmm11
	movslq %esi, %rsi
	por %xmm15, %xmm5
	movslq %edi, %rdi

	/* Get K (exponent=3*k+j) */
	psrld $12, %xmm5
	orps _sExpMask+__svml_scbrt_data_internal(%rip), %xmm12
	orps _sExpMask1+__svml_scbrt_data_internal(%rip), %xmm11
	psubd _iOne+__svml_scbrt_data_internal(%rip), %xmm6

	/* r = y - y` */
	subps %xmm11, %xmm12
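
	/*
	 * y and y` are built by masking (a sketch; bits_to_float is a
	 * hypothetical bit-cast helper): both get the exponent field of
	 * -1.0f, so the subtraction leaves only the mantissa difference:
	 *
	 *   float y  = bits_to_float (0xbf800000 | (u & 0x007fffff));  // -(1.mantissa)
	 *   float yp = bits_to_float (0xbf820000 | (u & 0x007e0000));  // -(1.b1..b5 1)
	 *   float d  = y - yp;   // == -(mantissa - y`); sign cancels against _sRcp
	 */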

	/* Get J */
	psubd %xmm5, %xmm6
	movdqu _iAbsMask+__svml_scbrt_data_internal(%rip), %xmm1
	psubd %xmm5, %xmm6
	movd (%rdx, %rsi), %xmm10
	pand %xmm0, %xmm1
	movd (%rdx, %rdi), %xmm9
	psubd %xmm5, %xmm6
	punpckldq %xmm9, %xmm10

	/* Get 128*J */
	pslld $7, %xmm6
	punpcklqdq %xmm10, %xmm4

	/*
	 * iCbrtIndex=4*l+128*j
	 * Zero index if callout expected
	 */
	paddd %xmm6, %xmm3
	psubd _iSubConst+__svml_scbrt_data_internal(%rip), %xmm1
	pcmpgtd _iCmpConst+__svml_scbrt_data_internal(%rip), %xmm1
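
	/*
	 * The two constants implement one signed compare that flags zero,
	 * denormals, Inf and NaN (a sketch, with a = u & 0x7fffffff and
	 * 0xfeffffff read as -16777217):
	 *
	 *   int special = (int32_t) (a - 0x80800000u) > (int32_t) 0xfeffffffu;
	 *
	 * which is false exactly for 0x00800000 <= a <= 0x7f7fffff, i.e. for
	 * finite normal inputs.
	 */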

	/* r=(y-y`)*rcp_table(y`) */
	mulps %xmm12, %xmm4
	movmskps %xmm1, %eax

	/* Biased exponent-1 */
	movdqu _iSignMask+__svml_scbrt_data_internal(%rip), %xmm13
	pandn %xmm3, %xmm1

	/*
	 * Add 2/3*(bias-1)+1 to (k+1/3*(bias-1))
	 * Attach sign to exponent
	 */
	movdqu _iBias+__svml_scbrt_data_internal(%rip), %xmm12
	pand %xmm13, %xmm2
	paddd %xmm5, %xmm12
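
	/*
	 * The scale 2^k is assembled directly in the exponent field
	 * (a sketch; kp is the psrld-by-12 result, i.e. k + 42, and
	 * se = u >> 23 holds the sign and biased exponent):
	 *
	 *   uint32_t scale_bits = ((se & 0x100) | (0x55 + kp)) << 23;
	 *   // 0x55 + (k + 42) == k + 127, so scale_bits is +/-2^k as a float
	 */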

	/* Load Cbrt table Hi & Lo values */
	movd %xmm1, %r8d
	por %xmm2, %xmm12
	pshufd $1, %xmm1, %xmm2
	pslld $23, %xmm12
	pshufd $2, %xmm1, %xmm7
	pshufd $3, %xmm1, %xmm1
	movd %xmm2, %r9d
	movd %xmm7, %r10d
	movd %xmm1, %r11d

	/* Polynomial: P = p1 + p2*r */
	movups _sP2+__svml_scbrt_data_internal(%rip), %xmm11
	mulps %xmm4, %xmm11
	movslq %r8d, %r8
	addps _sP1+__svml_scbrt_data_internal(%rip), %xmm11
	movslq %r9d, %r9
	movslq %r10d, %r10
	movslq %r11d, %r11
	movd 128(%rdx, %r8), %xmm10
	movd 128(%rdx, %r9), %xmm3
	movd 128(%rdx, %r10), %xmm9
	movd 128(%rdx, %r11), %xmm8
	punpckldq %xmm3, %xmm10
	punpckldq %xmm8, %xmm9
	punpcklqdq %xmm9, %xmm10

	/* sCbrtHi *= 2^k */
	mulps %xmm10, %xmm12

	/* T`*r */
	mulps %xmm12, %xmm4

	/* (T`*r)*P */
	mulps %xmm4, %xmm11

	/*
	 * T`*r*P+D`
	 * result = T`+(T`*r*P+D`)
	 */
	addps %xmm11, %xmm12
	testl %eax, %eax

	/* Go to special inputs processing branch */
	jne L(SPECIAL_VALUES_BRANCH)
	# LOE rbx rbp r12 r13 r14 r15 eax xmm0 xmm12

	/* Restore registers
	 * and exit the function
	 */

L(EXIT):
	movaps %xmm12, %xmm0
	addq $72, %rsp
	cfi_def_cfa_offset(8)
	ret
	cfi_def_cfa_offset(80)

	/* Branch to process
	 * special inputs
	 */
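
	/*
	 * In C terms the fallback below is roughly (a sketch; "mask" is the
	 * movmskps result with one bit per lane that failed the range check,
	 * and the two 16-byte stack slots hold the original arguments and
	 * the vector results):
	 *
	 *   for (int lane = 0; lane < 4; lane++)
	 *     if (mask & (1 << lane))
	 *       result[lane] = cbrtf (arg[lane]);
	 */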

L(SPECIAL_VALUES_BRANCH):
	movups %xmm0, 32(%rsp)
	movups %xmm12, 48(%rsp)
	# LOE rbx rbp r12 r13 r14 r15 eax

	xorl %edx, %edx
	movq %r12, 16(%rsp)
	cfi_offset(12, -64)
	movl %edx, %r12d
	movq %r13, 8(%rsp)
	cfi_offset(13, -72)
	movl %eax, %r13d
	movq %r14, (%rsp)
	cfi_offset(14, -80)
	# LOE rbx rbp r15 r12d r13d

	/* Range mask
	 * bits check
	 */

L(RANGEMASK_CHECK):
	btl %r12d, %r13d

	/* Call scalar math function */
	jc L(SCALAR_MATH_CALL)
	# LOE rbx rbp r15 r12d r13d

	/* Special inputs
	 * processing loop
	 */

L(SPECIAL_VALUES_LOOP):
	incl %r12d
	cmpl $4, %r12d

	/* Check bits in range mask */
	jl L(RANGEMASK_CHECK)
	# LOE rbx rbp r15 r12d r13d

	movq 16(%rsp), %r12
	cfi_restore(12)
	movq 8(%rsp), %r13
	cfi_restore(13)
	movq (%rsp), %r14
	cfi_restore(14)
	movups 48(%rsp), %xmm12

	/* Go to exit */
	jmp L(EXIT)
	cfi_offset(12, -64)
	cfi_offset(13, -72)
	cfi_offset(14, -80)
	# LOE rbx rbp r12 r13 r14 r15 xmm12

	/* Scalar math function call
	 * to process special input
	 */

L(SCALAR_MATH_CALL):
	movl %r12d, %r14d
	movss 32(%rsp, %r14, 4), %xmm0
	call cbrtf@PLT
	# LOE rbx rbp r14 r15 r12d r13d xmm0

	movss %xmm0, 48(%rsp, %r14, 4)

	/* Process special inputs in loop */
	jmp L(SPECIAL_VALUES_LOOP)
	# LOE rbx rbp r15 r12d r13d
END(_ZGVbN4v_cbrtf_sse4)

	.section .rodata, "a"
	.align 16

#ifdef __svml_scbrt_data_internal_typedef
typedef unsigned int VUINT32;
typedef struct {
	__declspec(align(16)) VUINT32 _sRcp[32][1];
	__declspec(align(16)) VUINT32 _sCbrtHL[96][1];
	__declspec(align(16)) VUINT32 _sP2[4][1];
	__declspec(align(16)) VUINT32 _sP1[4][1];
	__declspec(align(16)) VUINT32 _sMantissaMask[4][1];
	__declspec(align(16)) VUINT32 _sMantissaMask1[4][1];
	__declspec(align(16)) VUINT32 _sExpMask[4][1];
	__declspec(align(16)) VUINT32 _sExpMask1[4][1];
	__declspec(align(16)) VUINT32 _iRcpIndexMask[4][1];
	__declspec(align(16)) VUINT32 _iBExpMask[4][1];
	__declspec(align(16)) VUINT32 _iSignMask[4][1];
	__declspec(align(16)) VUINT32 _iBias[4][1];
	__declspec(align(16)) VUINT32 _iOne[4][1];
	__declspec(align(16)) VUINT32 _i555[4][1];
	__declspec(align(16)) VUINT32 _iAbsMask[4][1];
	__declspec(align(16)) VUINT32 _iSubConst[4][1];
	__declspec(align(16)) VUINT32 _iCmpConst[4][1];
} __svml_scbrt_data_internal;
#endif
__svml_scbrt_data_internal:
	/* _sRcp */
	.long 0xBF7C0FC1 /* -(1/(1+0/32+1/64)) = -.984615 */
	.long 0xBF74898D /* -(1/(1+1/32+1/64)) = -.955224 */
	.long 0xBF6D7304 /* -(1/(1+2/32+1/64)) = -.927536 */
	.long 0xBF66C2B4 /* -(1/(1+3/32+1/64)) = -.901408 */
	.long 0xBF607038 /* -(1/(1+4/32+1/64)) = -.876712 */
	.long 0xBF5A740E /* -(1/(1+5/32+1/64)) = -.853333 */
	.long 0xBF54C77B /* -(1/(1+6/32+1/64)) = -.831169 */
	.long 0xBF4F6475 /* -(1/(1+7/32+1/64)) = -.810127 */
	.long 0xBF4A4588 /* -(1/(1+8/32+1/64)) = -.790123 */
	.long 0xBF4565C8 /* -(1/(1+9/32+1/64)) = -.771084 */
	.long 0xBF40C0C1 /* -(1/(1+10/32+1/64)) = -.752941 */
	.long 0xBF3C5264 /* -(1/(1+11/32+1/64)) = -.735632 */
	.long 0xBF381703 /* -(1/(1+12/32+1/64)) = -.719101 */
	.long 0xBF340B41 /* -(1/(1+13/32+1/64)) = -.703297 */
	.long 0xBF302C0B /* -(1/(1+14/32+1/64)) = -.688172 */
	.long 0xBF2C7692 /* -(1/(1+15/32+1/64)) = -.673684 */
	.long 0xBF28E83F /* -(1/(1+16/32+1/64)) = -.659794 */
	.long 0xBF257EB5 /* -(1/(1+17/32+1/64)) = -.646465 */
	.long 0xBF2237C3 /* -(1/(1+18/32+1/64)) = -.633663 */
	.long 0xBF1F1166 /* -(1/(1+19/32+1/64)) = -.621359 */
	.long 0xBF1C09C1 /* -(1/(1+20/32+1/64)) = -.609524 */
	.long 0xBF191F1A /* -(1/(1+21/32+1/64)) = -.598131 */
	.long 0xBF164FDA /* -(1/(1+22/32+1/64)) = -.587156 */
	.long 0xBF139A86 /* -(1/(1+23/32+1/64)) = -.576577 */
	.long 0xBF10FDBC /* -(1/(1+24/32+1/64)) = -.566372 */
	.long 0xBF0E7835 /* -(1/(1+25/32+1/64)) = -.556522 */
	.long 0xBF0C08C1 /* -(1/(1+26/32+1/64)) = -.547009 */
	.long 0xBF09AE41 /* -(1/(1+27/32+1/64)) = -.537815 */
	.long 0xBF0767AB /* -(1/(1+28/32+1/64)) = -.528926 */
	.long 0xBF053408 /* -(1/(1+29/32+1/64)) = -.520325 */
	.long 0xBF03126F /* -(1/(1+30/32+1/64)) = -.512 */
	.long 0xBF010204 /* -(1/(1+31/32+1/64)) = -.503937 */
	/* _sCbrtHL */
	.align 16
	.long 0x3F80A9C9 /* HI((2^0*(1+0/32+1/64))^(1/3)) = 1.005181 */
	.long 0x3F81F833 /* HI((2^0*(1+1/32+1/64))^(1/3)) = 1.015387 */
	.long 0x3F834007 /* HI((2^0*(1+2/32+1/64))^(1/3)) = 1.025391 */
	.long 0x3F848194 /* HI((2^0*(1+3/32+1/64))^(1/3)) = 1.035204 */
	.long 0x3F85BD25 /* HI((2^0*(1+4/32+1/64))^(1/3)) = 1.044835 */
	.long 0x3F86F300 /* HI((2^0*(1+5/32+1/64))^(1/3)) = 1.054291 */
	.long 0x3F882365 /* HI((2^0*(1+6/32+1/64))^(1/3)) = 1.06358 */
	.long 0x3F894E90 /* HI((2^0*(1+7/32+1/64))^(1/3)) = 1.07271 */
	.long 0x3F8A74B9 /* HI((2^0*(1+8/32+1/64))^(1/3)) = 1.081687 */
	.long 0x3F8B9615 /* HI((2^0*(1+9/32+1/64))^(1/3)) = 1.090518 */
	.long 0x3F8CB2D4 /* HI((2^0*(1+10/32+1/64))^(1/3)) = 1.099207 */
	.long 0x3F8DCB24 /* HI((2^0*(1+11/32+1/64))^(1/3)) = 1.107762 */
	.long 0x3F8EDF31 /* HI((2^0*(1+12/32+1/64))^(1/3)) = 1.116186 */
	.long 0x3F8FEF22 /* HI((2^0*(1+13/32+1/64))^(1/3)) = 1.124485 */
	.long 0x3F90FB1F /* HI((2^0*(1+14/32+1/64))^(1/3)) = 1.132664 */
	.long 0x3F92034C /* HI((2^0*(1+15/32+1/64))^(1/3)) = 1.140726 */
	.long 0x3F9307CA /* HI((2^0*(1+16/32+1/64))^(1/3)) = 1.148675 */
	.long 0x3F9408B9 /* HI((2^0*(1+17/32+1/64))^(1/3)) = 1.156516 */
	.long 0x3F950638 /* HI((2^0*(1+18/32+1/64))^(1/3)) = 1.164252 */
	.long 0x3F960064 /* HI((2^0*(1+19/32+1/64))^(1/3)) = 1.171887 */
	.long 0x3F96F759 /* HI((2^0*(1+20/32+1/64))^(1/3)) = 1.179423 */
	.long 0x3F97EB2F /* HI((2^0*(1+21/32+1/64))^(1/3)) = 1.186865 */
	.long 0x3F98DC01 /* HI((2^0*(1+22/32+1/64))^(1/3)) = 1.194214 */
	.long 0x3F99C9E5 /* HI((2^0*(1+23/32+1/64))^(1/3)) = 1.201474 */
	.long 0x3F9AB4F2 /* HI((2^0*(1+24/32+1/64))^(1/3)) = 1.208647 */
	.long 0x3F9B9D3D /* HI((2^0*(1+25/32+1/64))^(1/3)) = 1.215736 */
	.long 0x3F9C82DA /* HI((2^0*(1+26/32+1/64))^(1/3)) = 1.222743 */
	.long 0x3F9D65DD /* HI((2^0*(1+27/32+1/64))^(1/3)) = 1.229671 */
	.long 0x3F9E4659 /* HI((2^0*(1+28/32+1/64))^(1/3)) = 1.236522 */
	.long 0x3F9F245F /* HI((2^0*(1+29/32+1/64))^(1/3)) = 1.243297 */
	.long 0x3FA00000 /* HI((2^0*(1+30/32+1/64))^(1/3)) = 1.25 */
	.long 0x3FA0D94C /* HI((2^0*(1+31/32+1/64))^(1/3)) = 1.256631 */
	.long 0x3FA21B02 /* HI((2^1*(1+0/32+1/64))^(1/3)) = 1.266449 */
	.long 0x3FA3C059 /* HI((2^1*(1+1/32+1/64))^(1/3)) = 1.279307 */
	.long 0x3FA55D61 /* HI((2^1*(1+2/32+1/64))^(1/3)) = 1.291912 */
	.long 0x3FA6F282 /* HI((2^1*(1+3/32+1/64))^(1/3)) = 1.304276 */
	.long 0x3FA8801A /* HI((2^1*(1+4/32+1/64))^(1/3)) = 1.316409 */
	.long 0x3FAA067E /* HI((2^1*(1+5/32+1/64))^(1/3)) = 1.328323 */
	.long 0x3FAB8602 /* HI((2^1*(1+6/32+1/64))^(1/3)) = 1.340027 */
	.long 0x3FACFEEF /* HI((2^1*(1+7/32+1/64))^(1/3)) = 1.35153 */
	.long 0x3FAE718E /* HI((2^1*(1+8/32+1/64))^(1/3)) = 1.36284 */
	.long 0x3FAFDE1F /* HI((2^1*(1+9/32+1/64))^(1/3)) = 1.373966 */
	.long 0x3FB144E1 /* HI((2^1*(1+10/32+1/64))^(1/3)) = 1.384915 */
	.long 0x3FB2A60D /* HI((2^1*(1+11/32+1/64))^(1/3)) = 1.395692 */
	.long 0x3FB401DA /* HI((2^1*(1+12/32+1/64))^(1/3)) = 1.406307 */
	.long 0x3FB5587B /* HI((2^1*(1+13/32+1/64))^(1/3)) = 1.416763 */
	.long 0x3FB6AA20 /* HI((2^1*(1+14/32+1/64))^(1/3)) = 1.427067 */
	.long 0x3FB7F6F7 /* HI((2^1*(1+15/32+1/64))^(1/3)) = 1.437224 */
	.long 0x3FB93F29 /* HI((2^1*(1+16/32+1/64))^(1/3)) = 1.44724 */
	.long 0x3FBA82E1 /* HI((2^1*(1+17/32+1/64))^(1/3)) = 1.457119 */
	.long 0x3FBBC244 /* HI((2^1*(1+18/32+1/64))^(1/3)) = 1.466866 */
	.long 0x3FBCFD77 /* HI((2^1*(1+19/32+1/64))^(1/3)) = 1.476485 */
	.long 0x3FBE349B /* HI((2^1*(1+20/32+1/64))^(1/3)) = 1.48598 */
	.long 0x3FBF67D3 /* HI((2^1*(1+21/32+1/64))^(1/3)) = 1.495356 */
	.long 0x3FC0973C /* HI((2^1*(1+22/32+1/64))^(1/3)) = 1.504615 */
	.long 0x3FC1C2F6 /* HI((2^1*(1+23/32+1/64))^(1/3)) = 1.513762 */
	.long 0x3FC2EB1A /* HI((2^1*(1+24/32+1/64))^(1/3)) = 1.5228 */
	.long 0x3FC40FC6 /* HI((2^1*(1+25/32+1/64))^(1/3)) = 1.531731 */
	.long 0x3FC53112 /* HI((2^1*(1+26/32+1/64))^(1/3)) = 1.54056 */
	.long 0x3FC64F16 /* HI((2^1*(1+27/32+1/64))^(1/3)) = 1.549289 */
	.long 0x3FC769EB /* HI((2^1*(1+28/32+1/64))^(1/3)) = 1.55792 */
	.long 0x3FC881A6 /* HI((2^1*(1+29/32+1/64))^(1/3)) = 1.566457 */
	.long 0x3FC9965D /* HI((2^1*(1+30/32+1/64))^(1/3)) = 1.574901 */
	.long 0x3FCAA825 /* HI((2^1*(1+31/32+1/64))^(1/3)) = 1.583256 */
	.long 0x3FCC3D79 /* HI((2^2*(1+0/32+1/64))^(1/3)) = 1.595626 */
	.long 0x3FCE5054 /* HI((2^2*(1+1/32+1/64))^(1/3)) = 1.611826 */
	.long 0x3FD058B8 /* HI((2^2*(1+2/32+1/64))^(1/3)) = 1.627707 */
	.long 0x3FD25726 /* HI((2^2*(1+3/32+1/64))^(1/3)) = 1.643285 */
	.long 0x3FD44C15 /* HI((2^2*(1+4/32+1/64))^(1/3)) = 1.658572 */
	.long 0x3FD637F2 /* HI((2^2*(1+5/32+1/64))^(1/3)) = 1.673582 */
	.long 0x3FD81B24 /* HI((2^2*(1+6/32+1/64))^(1/3)) = 1.688328 */
	.long 0x3FD9F60B /* HI((2^2*(1+7/32+1/64))^(1/3)) = 1.702821 */
	.long 0x3FDBC8FE /* HI((2^2*(1+8/32+1/64))^(1/3)) = 1.717071 */
	.long 0x3FDD9452 /* HI((2^2*(1+9/32+1/64))^(1/3)) = 1.731089 */
	.long 0x3FDF5853 /* HI((2^2*(1+10/32+1/64))^(1/3)) = 1.744883 */
	.long 0x3FE1154B /* HI((2^2*(1+11/32+1/64))^(1/3)) = 1.758462 */
	.long 0x3FE2CB7F /* HI((2^2*(1+12/32+1/64))^(1/3)) = 1.771835 */
	.long 0x3FE47B2E /* HI((2^2*(1+13/32+1/64))^(1/3)) = 1.785009 */
	.long 0x3FE62496 /* HI((2^2*(1+14/32+1/64))^(1/3)) = 1.797992 */
	.long 0x3FE7C7F0 /* HI((2^2*(1+15/32+1/64))^(1/3)) = 1.810789 */
	.long 0x3FE96571 /* HI((2^2*(1+16/32+1/64))^(1/3)) = 1.823408 */
	.long 0x3FEAFD4C /* HI((2^2*(1+17/32+1/64))^(1/3)) = 1.835855 */
	.long 0x3FEC8FB3 /* HI((2^2*(1+18/32+1/64))^(1/3)) = 1.848135 */
	.long 0x3FEE1CD3 /* HI((2^2*(1+19/32+1/64))^(1/3)) = 1.860255 */
	.long 0x3FEFA4D7 /* HI((2^2*(1+20/32+1/64))^(1/3)) = 1.872218 */
	.long 0x3FF127E9 /* HI((2^2*(1+21/32+1/64))^(1/3)) = 1.88403 */
	.long 0x3FF2A62F /* HI((2^2*(1+22/32+1/64))^(1/3)) = 1.895697 */
	.long 0x3FF41FD0 /* HI((2^2*(1+23/32+1/64))^(1/3)) = 1.907221 */
	.long 0x3FF594EE /* HI((2^2*(1+24/32+1/64))^(1/3)) = 1.918607 */
	.long 0x3FF705AC /* HI((2^2*(1+25/32+1/64))^(1/3)) = 1.929861 */
	.long 0x3FF8722A /* HI((2^2*(1+26/32+1/64))^(1/3)) = 1.940984 */
	.long 0x3FF9DA86 /* HI((2^2*(1+27/32+1/64))^(1/3)) = 1.951981 */
	.long 0x3FFB3EDE /* HI((2^2*(1+28/32+1/64))^(1/3)) = 1.962856 */
	.long 0x3FFC9F4E /* HI((2^2*(1+29/32+1/64))^(1/3)) = 1.973612 */
	.long 0x3FFDFBF2 /* HI((2^2*(1+30/32+1/64))^(1/3)) = 1.984251 */
	.long 0x3FFF54E3 /* HI((2^2*(1+31/32+1/64))^(1/3)) = 1.994778 */
	.align 16
	.long 0xBDE3A962, 0xBDE3A962, 0xBDE3A962, 0xBDE3A962 /* _sP2 */
	.align 16
	.long 0x3EAAAC91, 0x3EAAAC91, 0x3EAAAC91, 0x3EAAAC91 /* _sP1 */
	.align 16
	.long 0x007fffff, 0x007fffff, 0x007fffff, 0x007fffff /* _sMantissaMask (EXP_MSK3) */
	.align 16
	.long 0x007e0000, 0x007e0000, 0x007e0000, 0x007e0000 /* _sMantissaMask1 (SIG_MASK) */
	.align 16
	.long 0xBF800000, 0xBF800000, 0xBF800000, 0xBF800000 /* _sExpMask (EXP_MASK) */
	.align 16
	.long 0xBF820000, 0xBF820000, 0xBF820000, 0xBF820000 /* _sExpMask1 (EXP_MASK2) */
	.align 16
	.long 0x0000007c, 0x0000007c, 0x0000007c, 0x0000007c /* _iRcpIndexMask */
	.align 16
	.long 0x000000ff, 0x000000ff, 0x000000ff, 0x000000ff /* _iBExpMask */
	.align 16
	.long 0x00000100, 0x00000100, 0x00000100, 0x00000100 /* _iSignMask */
	.align 16
	.long 0x00000055, 0x00000055, 0x00000055, 0x00000055 /* _iBias */
	.align 16
	.long 0x00000001, 0x00000001, 0x00000001, 0x00000001 /* _iOne */
	.align 16
	.long 0x00000555, 0x00000555, 0x00000555, 0x00000555 /* _i555 */
	.align 16
	.long 0x7fffffff, 0x7fffffff, 0x7fffffff, 0x7fffffff /* _iAbsMask */
	.align 16
	.long 0x80800000, 0x80800000, 0x80800000, 0x80800000 /* _iSubConst */
	.align 16
	.long 0xFEFFFFFF, 0xFEFFFFFF, 0xFEFFFFFF, 0xFEFFFFFF /* _iCmpConst */
	.align 16
	.type __svml_scbrt_data_internal, @object
	.size __svml_scbrt_data_internal, .-__svml_scbrt_data_internal
	.align 16

.FLT_17:
	.long 0xffffffff, 0x00000000, 0xffffffff, 0x00000000
	.type .FLT_17, @object
	.size .FLT_17, 16
