Warning: This file is not a C or C++ file. It does not have highlighting.
1 | /*===---- wasm_simd128.h - WebAssembly portable SIMD intrinsics ------------=== |
---|---|
2 | * |
3 | * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | * See https://llvm.org/LICENSE.txt for license information. |
5 | * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | * |
7 | *===-----------------------------------------------------------------------=== |
8 | */ |
9 | |
#ifndef __WASM_SIMD128_H
#define __WASM_SIMD128_H

#include <stdbool.h>
#include <stdint.h>

// User-facing type: a generic, opaque 128-bit SIMD value. All intrinsics take
// and return v128_t; lane interpretation is chosen per intrinsic by casting to
// one of the internal lane-typed vectors below.
typedef int32_t v128_t __attribute__((__vector_size__(16), __aligned__(16)));

// Internal types determined by clang builtin definitions
// __v128_u has alignment 1 so that loads/stores through it are valid at any
// address (used for the UB-free unaligned accesses below).
typedef int32_t __v128_u __attribute__((__vector_size__(16), __aligned__(1)));
typedef signed char __i8x16
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned char __u8x16
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef short __i16x8 __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned short __u16x8
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef int __i32x4 __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned int __u32x4
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef long long __i64x2 __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned long long __u64x2
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef float __f32x4 __attribute__((__vector_size__(16), __aligned__(16)));
typedef double __f64x2 __attribute__((__vector_size__(16), __aligned__(16)));

// Half-width (64-bit) vectors: sources for the widening/extending loads.
typedef signed char __i8x8 __attribute__((__vector_size__(8), __aligned__(8)));
typedef unsigned char __u8x8
    __attribute__((__vector_size__(8), __aligned__(8)));
typedef short __i16x4 __attribute__((__vector_size__(8), __aligned__(8)));
typedef unsigned short __u16x4
    __attribute__((__vector_size__(8), __aligned__(8)));
typedef int __i32x2 __attribute__((__vector_size__(8), __aligned__(8)));
typedef unsigned int __u32x2
    __attribute__((__vector_size__(8), __aligned__(8)));
typedef float __f32x2 __attribute__((__vector_size__(8), __aligned__(8)));

// Attributes applied to every intrinsic: force inlining (these are thin
// wrappers that must compile down to single instructions), require the
// "simd128" target feature, and allow 128-bit vector codegen.
#define __DEFAULT_FN_ATTRS \
  __attribute__((__always_inline__, __nodebug__, __target__("simd128"), \
                 __min_vector_width__(128)))

// Emits a compile-time error unless the argument is an integer constant
// expression; used for lane indices and v128.const operands.
#define __REQUIRE_CONSTANT(c) \
  __attribute__((__diagnose_if__(!__builtin_constant_p(c), \
                                 #c " must be constant", "error")))
55 | |
// Load 16 bytes from (possibly unaligned) memory (wasm v128.load).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load(const void *__mem) {
  // UB-free unaligned access copied from xmmintrin.h: a packed, may_alias
  // single-member struct legalizes the unaligned, type-punned read.
  struct __wasm_v128_load_struct {
    __v128_u __v;
  } __attribute__((__packed__, __may_alias__));
  return ((const struct __wasm_v128_load_struct *)__mem)->__v;
}
63 | |
// Load a single scalar from (possibly unaligned) memory and broadcast it to
// every lane (wasm v128.loadN_splat). Each function uses a packed, may_alias
// struct for a UB-free unaligned read, then builds the splat with a vector
// initializer.

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load8_splat(const void *__mem) {
  struct __wasm_v128_load8_splat_struct {
    uint8_t __v;
  } __attribute__((__packed__, __may_alias__));
  uint8_t __v = ((const struct __wasm_v128_load8_splat_struct *)__mem)->__v;
  return (v128_t)(__u8x16){__v, __v, __v, __v, __v, __v, __v, __v,
                           __v, __v, __v, __v, __v, __v, __v, __v};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load16_splat(const void *__mem) {
  struct __wasm_v128_load16_splat_struct {
    uint16_t __v;
  } __attribute__((__packed__, __may_alias__));
  uint16_t __v = ((const struct __wasm_v128_load16_splat_struct *)__mem)->__v;
  return (v128_t)(__u16x8){__v, __v, __v, __v, __v, __v, __v, __v};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load32_splat(const void *__mem) {
  struct __wasm_v128_load32_splat_struct {
    uint32_t __v;
  } __attribute__((__packed__, __may_alias__));
  uint32_t __v = ((const struct __wasm_v128_load32_splat_struct *)__mem)->__v;
  return (v128_t)(__u32x4){__v, __v, __v, __v};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load64_splat(const void *__mem) {
  struct __wasm_v128_load64_splat_struct {
    uint64_t __v;
  } __attribute__((__packed__, __may_alias__));
  uint64_t __v = ((const struct __wasm_v128_load64_splat_struct *)__mem)->__v;
  return (v128_t)(__u64x2){__v, __v};
}
100 | |
// Extending loads: read 8 bytes of narrow elements from (possibly unaligned)
// memory and sign-/zero-extend each element to twice its width (wasm
// iNxM.load_Kx_s / iNxM.load_Kx_u). __builtin_convertvector performs the
// per-lane widening conversion.

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_load8x8(const void *__mem) {
  struct __wasm_i16x8_load8x8_struct {
    __i8x8 __v;
  } __attribute__((__packed__, __may_alias__));
  __i8x8 __v = ((const struct __wasm_i16x8_load8x8_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __i16x8);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_load8x8(const void *__mem) {
  struct __wasm_u16x8_load8x8_struct {
    __u8x8 __v;
  } __attribute__((__packed__, __may_alias__));
  __u8x8 __v = ((const struct __wasm_u16x8_load8x8_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __u16x8);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_load16x4(const void *__mem) {
  struct __wasm_i32x4_load16x4_struct {
    __i16x4 __v;
  } __attribute__((__packed__, __may_alias__));
  __i16x4 __v = ((const struct __wasm_i32x4_load16x4_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __i32x4);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_load16x4(const void *__mem) {
  struct __wasm_u32x4_load16x4_struct {
    __u16x4 __v;
  } __attribute__((__packed__, __may_alias__));
  __u16x4 __v = ((const struct __wasm_u32x4_load16x4_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __u32x4);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_load32x2(const void *__mem) {
  struct __wasm_i64x2_load32x2_struct {
    __i32x2 __v;
  } __attribute__((__packed__, __may_alias__));
  __i32x2 __v = ((const struct __wasm_i64x2_load32x2_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __i64x2);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_load32x2(const void *__mem) {
  struct __wasm_u64x2_load32x2_struct {
    __u32x2 __v;
  } __attribute__((__packed__, __may_alias__));
  __u32x2 __v = ((const struct __wasm_u64x2_load32x2_struct *)__mem)->__v;
  return (v128_t) __builtin_convertvector(__v, __u64x2);
}
154 | |
// Load a single 32- or 64-bit value into lane 0 and zero the remaining lanes
// (wasm v128.load32_zero / v128.load64_zero).

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load32_zero(const void *__mem) {
  struct __wasm_v128_load32_zero_struct {
    int32_t __v;
  } __attribute__((__packed__, __may_alias__));
  int32_t __v = ((const struct __wasm_v128_load32_zero_struct *)__mem)->__v;
  return (v128_t)(__i32x4){__v, 0, 0, 0};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load64_zero(const void *__mem) {
  struct __wasm_v128_load64_zero_struct {
    int64_t __v;
  } __attribute__((__packed__, __may_alias__));
  int64_t __v = ((const struct __wasm_v128_load64_zero_struct *)__mem)->__v;
  return (v128_t)(__i64x2){__v, 0};
}
172 | |
// Load a scalar from (possibly unaligned) memory into lane __i of __vec,
// leaving the other lanes unchanged (wasm v128.loadN_lane). The lane index
// must be an integer constant expression.

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load8_lane(
    const void *__mem, v128_t __vec, int __i) __REQUIRE_CONSTANT(__i) {
  struct __wasm_v128_load8_lane_struct {
    int8_t __v;
  } __attribute__((__packed__, __may_alias__));
  int8_t __v = ((const struct __wasm_v128_load8_lane_struct *)__mem)->__v;
  __i8x16 __ret = (__i8x16)__vec;
  __ret[__i] = __v;
  return (v128_t)__ret;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load16_lane(
    const void *__mem, v128_t __vec, int __i) __REQUIRE_CONSTANT(__i) {
  struct __wasm_v128_load16_lane_struct {
    int16_t __v;
  } __attribute__((__packed__, __may_alias__));
  int16_t __v = ((const struct __wasm_v128_load16_lane_struct *)__mem)->__v;
  __i16x8 __ret = (__i16x8)__vec;
  __ret[__i] = __v;
  return (v128_t)__ret;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load32_lane(
    const void *__mem, v128_t __vec, int __i) __REQUIRE_CONSTANT(__i) {
  struct __wasm_v128_load32_lane_struct {
    int32_t __v;
  } __attribute__((__packed__, __may_alias__));
  int32_t __v = ((const struct __wasm_v128_load32_lane_struct *)__mem)->__v;
  __i32x4 __ret = (__i32x4)__vec;
  __ret[__i] = __v;
  return (v128_t)__ret;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_load64_lane(
    const void *__mem, v128_t __vec, int __i) __REQUIRE_CONSTANT(__i) {
  struct __wasm_v128_load64_lane_struct {
    int64_t __v;
  } __attribute__((__packed__, __may_alias__));
  int64_t __v = ((const struct __wasm_v128_load64_lane_struct *)__mem)->__v;
  __i64x2 __ret = (__i64x2)__vec;
  __ret[__i] = __v;
  return (v128_t)__ret;
}
216 | |
// Store 16 bytes to (possibly unaligned) memory (wasm v128.store).
static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store(void *__mem,
                                                          v128_t __a) {
  // UB-free unaligned access copied from xmmintrin.h
  struct __wasm_v128_store_struct {
    __v128_u __v;
  } __attribute__((__packed__, __may_alias__));
  ((struct __wasm_v128_store_struct *)__mem)->__v = __a;
}

// Store lane __i of __vec to (possibly unaligned) memory (wasm
// v128.storeN_lane). The lane index must be an integer constant expression.

static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store8_lane(void *__mem,
                                                                v128_t __vec,
                                                                int __i)
    __REQUIRE_CONSTANT(__i) {
  struct __wasm_v128_store8_lane_struct {
    int8_t __v;
  } __attribute__((__packed__, __may_alias__));
  ((struct __wasm_v128_store8_lane_struct *)__mem)->__v = ((__i8x16)__vec)[__i];
}

static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store16_lane(void *__mem,
                                                                 v128_t __vec,
                                                                 int __i)
    __REQUIRE_CONSTANT(__i) {
  struct __wasm_v128_store16_lane_struct {
    int16_t __v;
  } __attribute__((__packed__, __may_alias__));
  ((struct __wasm_v128_store16_lane_struct *)__mem)->__v =
      ((__i16x8)__vec)[__i];
}

static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store32_lane(void *__mem,
                                                                 v128_t __vec,
                                                                 int __i)
    __REQUIRE_CONSTANT(__i) {
  struct __wasm_v128_store32_lane_struct {
    int32_t __v;
  } __attribute__((__packed__, __may_alias__));
  ((struct __wasm_v128_store32_lane_struct *)__mem)->__v =
      ((__i32x4)__vec)[__i];
}

static __inline__ void __DEFAULT_FN_ATTRS wasm_v128_store64_lane(void *__mem,
                                                                 v128_t __vec,
                                                                 int __i)
    __REQUIRE_CONSTANT(__i) {
  struct __wasm_v128_store64_lane_struct {
    int64_t __v;
  } __attribute__((__packed__, __may_alias__));
  ((struct __wasm_v128_store64_lane_struct *)__mem)->__v =
      ((__i64x2)__vec)[__i];
}
268 | |
// Construct a vector from individual lane values, lane 0 first. Arguments may
// be runtime values; for compile-time constants prefer the _const variants
// below.

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_make(int8_t __c0, int8_t __c1, int8_t __c2, int8_t __c3, int8_t __c4,
                int8_t __c5, int8_t __c6, int8_t __c7, int8_t __c8, int8_t __c9,
                int8_t __c10, int8_t __c11, int8_t __c12, int8_t __c13,
                int8_t __c14, int8_t __c15) {
  return (v128_t)(__i8x16){__c0, __c1, __c2, __c3, __c4, __c5,
                           __c6, __c7, __c8, __c9, __c10, __c11,
                           __c12, __c13, __c14, __c15};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_make(uint8_t __c0, uint8_t __c1, uint8_t __c2, uint8_t __c3,
                uint8_t __c4, uint8_t __c5, uint8_t __c6, uint8_t __c7,
                uint8_t __c8, uint8_t __c9, uint8_t __c10, uint8_t __c11,
                uint8_t __c12, uint8_t __c13, uint8_t __c14, uint8_t __c15) {
  return (v128_t)(__u8x16){__c0, __c1, __c2, __c3, __c4, __c5,
                           __c6, __c7, __c8, __c9, __c10, __c11,
                           __c12, __c13, __c14, __c15};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_make(int16_t __c0, int16_t __c1, int16_t __c2, int16_t __c3,
                int16_t __c4, int16_t __c5, int16_t __c6, int16_t __c7) {
  return (v128_t)(__i16x8){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_make(uint16_t __c0, uint16_t __c1, uint16_t __c2, uint16_t __c3,
                uint16_t __c4, uint16_t __c5, uint16_t __c6, uint16_t __c7) {
  return (v128_t)(__u16x8){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_make(int32_t __c0,
                                                            int32_t __c1,
                                                            int32_t __c2,
                                                            int32_t __c3) {
  return (v128_t)(__i32x4){__c0, __c1, __c2, __c3};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_make(uint32_t __c0,
                                                            uint32_t __c1,
                                                            uint32_t __c2,
                                                            uint32_t __c3) {
  return (v128_t)(__u32x4){__c0, __c1, __c2, __c3};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_make(int64_t __c0,
                                                            int64_t __c1) {
  return (v128_t)(__i64x2){__c0, __c1};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u64x2_make(uint64_t __c0,
                                                            uint64_t __c1) {
  return (v128_t)(__u64x2){__c0, __c1};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_make(float __c0,
                                                            float __c1,
                                                            float __c2,
                                                            float __c3) {
  return (v128_t)(__f32x4){__c0, __c1, __c2, __c3};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_make(double __c0,
                                                            double __c1) {
  return (v128_t)(__f64x2){__c0, __c1};
}
336 | |
// Like the _make constructors, but every lane value must be an integer/float
// constant expression (enforced via __REQUIRE_CONSTANT), so the result can be
// materialized as a single v128.const instruction.

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_const(int8_t __c0, int8_t __c1, int8_t __c2, int8_t __c3,
                 int8_t __c4, int8_t __c5, int8_t __c6, int8_t __c7,
                 int8_t __c8, int8_t __c9, int8_t __c10, int8_t __c11,
                 int8_t __c12, int8_t __c13, int8_t __c14, int8_t __c15)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) __REQUIRE_CONSTANT(__c4)
            __REQUIRE_CONSTANT(__c5) __REQUIRE_CONSTANT(__c6)
                __REQUIRE_CONSTANT(__c7) __REQUIRE_CONSTANT(__c8)
                    __REQUIRE_CONSTANT(__c9) __REQUIRE_CONSTANT(__c10)
                        __REQUIRE_CONSTANT(__c11) __REQUIRE_CONSTANT(__c12)
                            __REQUIRE_CONSTANT(__c13) __REQUIRE_CONSTANT(__c14)
                                __REQUIRE_CONSTANT(__c15) {
  return (v128_t)(__i8x16){__c0, __c1, __c2, __c3, __c4, __c5,
                           __c6, __c7, __c8, __c9, __c10, __c11,
                           __c12, __c13, __c14, __c15};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_const(uint8_t __c0, uint8_t __c1, uint8_t __c2, uint8_t __c3,
                 uint8_t __c4, uint8_t __c5, uint8_t __c6, uint8_t __c7,
                 uint8_t __c8, uint8_t __c9, uint8_t __c10, uint8_t __c11,
                 uint8_t __c12, uint8_t __c13, uint8_t __c14, uint8_t __c15)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) __REQUIRE_CONSTANT(__c4)
            __REQUIRE_CONSTANT(__c5) __REQUIRE_CONSTANT(__c6)
                __REQUIRE_CONSTANT(__c7) __REQUIRE_CONSTANT(__c8)
                    __REQUIRE_CONSTANT(__c9) __REQUIRE_CONSTANT(__c10)
                        __REQUIRE_CONSTANT(__c11) __REQUIRE_CONSTANT(__c12)
                            __REQUIRE_CONSTANT(__c13) __REQUIRE_CONSTANT(__c14)
                                __REQUIRE_CONSTANT(__c15) {
  return (v128_t)(__u8x16){__c0, __c1, __c2, __c3, __c4, __c5,
                           __c6, __c7, __c8, __c9, __c10, __c11,
                           __c12, __c13, __c14, __c15};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_const(int16_t __c0, int16_t __c1, int16_t __c2, int16_t __c3,
                 int16_t __c4, int16_t __c5, int16_t __c6, int16_t __c7)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) __REQUIRE_CONSTANT(__c4)
            __REQUIRE_CONSTANT(__c5) __REQUIRE_CONSTANT(__c6)
                __REQUIRE_CONSTANT(__c7) {
  return (v128_t)(__i16x8){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_const(uint16_t __c0, uint16_t __c1, uint16_t __c2, uint16_t __c3,
                 uint16_t __c4, uint16_t __c5, uint16_t __c6, uint16_t __c7)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) __REQUIRE_CONSTANT(__c4)
            __REQUIRE_CONSTANT(__c5) __REQUIRE_CONSTANT(__c6)
                __REQUIRE_CONSTANT(__c7) {
  return (v128_t)(__u16x8){__c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_const(int32_t __c0, int32_t __c1, int32_t __c2, int32_t __c3)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) {
  return (v128_t)(__i32x4){__c0, __c1, __c2, __c3};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_const(uint32_t __c0, uint32_t __c1, uint32_t __c2, uint32_t __c3)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) {
  return (v128_t)(__u32x4){__c0, __c1, __c2, __c3};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_const(int64_t __c0,
                                                             int64_t __c1)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) {
  return (v128_t)(__i64x2){__c0, __c1};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u64x2_const(uint64_t __c0,
                                                             uint64_t __c1)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) {
  return (v128_t)(__u64x2){__c0, __c1};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_const(float __c0, float __c1, float __c2, float __c3)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) __REQUIRE_CONSTANT(__c2)
        __REQUIRE_CONSTANT(__c3) {
  return (v128_t)(__f32x4){__c0, __c1, __c2, __c3};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_const(double __c0,
                                                             double __c1)
    __REQUIRE_CONSTANT(__c0) __REQUIRE_CONSTANT(__c1) {
  return (v128_t)(__f64x2){__c0, __c1};
}
431 | |
// Constant splat: broadcast a single constant expression __c to all lanes,
// producing a v128.const. For runtime values use the _splat functions below.

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_const_splat(int8_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__i8x16){__c, __c, __c, __c, __c, __c, __c, __c,
                           __c, __c, __c, __c, __c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_const_splat(uint8_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__u8x16){__c, __c, __c, __c, __c, __c, __c, __c,
                           __c, __c, __c, __c, __c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_const_splat(int16_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__i16x8){__c, __c, __c, __c, __c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_const_splat(uint16_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__u16x8){__c, __c, __c, __c, __c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_const_splat(int32_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__i32x4){__c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_const_splat(uint32_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__u32x4){__c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_const_splat(int64_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__i64x2){__c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u64x2_const_splat(uint64_t __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__u64x2){__c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_const_splat(float __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__f32x4){__c, __c, __c, __c};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_const_splat(double __c)
    __REQUIRE_CONSTANT(__c) {
  return (v128_t)(__f64x2){__c, __c};
}
483 | |
// Per-lane-type splat / extract_lane / replace_lane:
//  - *_splat broadcasts a runtime scalar to every lane (iNxM.splat).
//  - *_extract_lane returns lane __i as a scalar (iNxM.extract_lane_s/u);
//    __i must be a constant expression.
//  - *_replace_lane returns a copy of __a with lane __i set to __b
//    (iNxM.replace_lane); __i must be a constant expression.

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_splat(int8_t __a) {
  return (v128_t)(__i8x16){__a, __a, __a, __a, __a, __a, __a, __a,
                           __a, __a, __a, __a, __a, __a, __a, __a};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_splat(uint8_t __a) {
  return (v128_t)(__u8x16){__a, __a, __a, __a, __a, __a, __a, __a,
                           __a, __a, __a, __a, __a, __a, __a, __a};
}

static __inline__ int8_t __DEFAULT_FN_ATTRS wasm_i8x16_extract_lane(v128_t __a,
                                                                    int __i)
    __REQUIRE_CONSTANT(__i) {
  return ((__i8x16)__a)[__i];
}

static __inline__ uint8_t __DEFAULT_FN_ATTRS wasm_u8x16_extract_lane(v128_t __a,
                                                                     int __i)
    __REQUIRE_CONSTANT(__i) {
  return ((__u8x16)__a)[__i];
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_replace_lane(v128_t __a,
                                                                    int __i,
                                                                    int8_t __b)
    __REQUIRE_CONSTANT(__i) {
  __i8x16 __v = (__i8x16)__a;
  __v[__i] = __b;
  return (v128_t)__v;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_replace_lane(v128_t __a,
                                                                    int __i,
                                                                    uint8_t __b)
    __REQUIRE_CONSTANT(__i) {
  __u8x16 __v = (__u8x16)__a;
  __v[__i] = __b;
  return (v128_t)__v;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_splat(int16_t __a) {
  return (v128_t)(__i16x8){__a, __a, __a, __a, __a, __a, __a, __a};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_splat(uint16_t __a) {
  return (v128_t)(__u16x8){__a, __a, __a, __a, __a, __a, __a, __a};
}

static __inline__ int16_t __DEFAULT_FN_ATTRS wasm_i16x8_extract_lane(v128_t __a,
                                                                     int __i)
    __REQUIRE_CONSTANT(__i) {
  return ((__i16x8)__a)[__i];
}

static __inline__ uint16_t __DEFAULT_FN_ATTRS
wasm_u16x8_extract_lane(v128_t __a, int __i) __REQUIRE_CONSTANT(__i) {
  return ((__u16x8)__a)[__i];
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_replace_lane(v128_t __a,
                                                                    int __i,
                                                                    int16_t __b)
    __REQUIRE_CONSTANT(__i) {
  __i16x8 __v = (__i16x8)__a;
  __v[__i] = __b;
  return (v128_t)__v;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_replace_lane(
    v128_t __a, int __i, uint16_t __b) __REQUIRE_CONSTANT(__i) {
  __u16x8 __v = (__u16x8)__a;
  __v[__i] = __b;
  return (v128_t)__v;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_splat(int32_t __a) {
  return (v128_t)(__i32x4){__a, __a, __a, __a};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_splat(uint32_t __a) {
  return (v128_t)(__u32x4){__a, __a, __a, __a};
}

static __inline__ int32_t __DEFAULT_FN_ATTRS wasm_i32x4_extract_lane(v128_t __a,
                                                                     int __i)
    __REQUIRE_CONSTANT(__i) {
  return ((__i32x4)__a)[__i];
}

static __inline__ uint32_t __DEFAULT_FN_ATTRS
wasm_u32x4_extract_lane(v128_t __a, int __i) __REQUIRE_CONSTANT(__i) {
  return ((__u32x4)__a)[__i];
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_replace_lane(v128_t __a,
                                                                    int __i,
                                                                    int32_t __b)
    __REQUIRE_CONSTANT(__i) {
  __i32x4 __v = (__i32x4)__a;
  __v[__i] = __b;
  return (v128_t)__v;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_replace_lane(
    v128_t __a, int __i, uint32_t __b) __REQUIRE_CONSTANT(__i) {
  __u32x4 __v = (__u32x4)__a;
  __v[__i] = __b;
  return (v128_t)__v;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_splat(int64_t __a) {
  return (v128_t)(__i64x2){__a, __a};
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u64x2_splat(uint64_t __a) {
  return (v128_t)(__u64x2){__a, __a};
}

static __inline__ int64_t __DEFAULT_FN_ATTRS wasm_i64x2_extract_lane(v128_t __a,
                                                                     int __i)
    __REQUIRE_CONSTANT(__i) {
  return ((__i64x2)__a)[__i];
}

static __inline__ uint64_t __DEFAULT_FN_ATTRS
wasm_u64x2_extract_lane(v128_t __a, int __i) __REQUIRE_CONSTANT(__i) {
  return ((__u64x2)__a)[__i];
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_replace_lane(v128_t __a,
                                                                    int __i,
                                                                    int64_t __b)
    __REQUIRE_CONSTANT(__i) {
  __i64x2 __v = (__i64x2)__a;
  __v[__i] = __b;
  return (v128_t)__v;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u64x2_replace_lane(
    v128_t __a, int __i, uint64_t __b) __REQUIRE_CONSTANT(__i) {
  __u64x2 __v = (__u64x2)__a;
  __v[__i] = __b;
  return (v128_t)__v;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_splat(float __a) {
  return (v128_t)(__f32x4){__a, __a, __a, __a};
}

static __inline__ float __DEFAULT_FN_ATTRS wasm_f32x4_extract_lane(v128_t __a,
                                                                   int __i)
    __REQUIRE_CONSTANT(__i) {
  return ((__f32x4)__a)[__i];
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_replace_lane(v128_t __a,
                                                                    int __i,
                                                                    float __b)
    __REQUIRE_CONSTANT(__i) {
  __f32x4 __v = (__f32x4)__a;
  __v[__i] = __b;
  return (v128_t)__v;
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_splat(double __a) {
  return (v128_t)(__f64x2){__a, __a};
}

static __inline__ double __DEFAULT_FN_ATTRS wasm_f64x2_extract_lane(v128_t __a,
                                                                    int __i)
    __REQUIRE_CONSTANT(__i) {
  return ((__f64x2)__a)[__i];
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_replace_lane(v128_t __a,
                                                                    int __i,
                                                                    double __b)
    __REQUIRE_CONSTANT(__i) {
  __f64x2 __v = (__f64x2)__a;
  __v[__i] = __b;
  return (v128_t)__v;
}
666 | |
// Lane-wise 8-bit comparisons: each result lane is all ones (-1) where the
// predicate holds, all zeros otherwise. i8x16_* compare as signed bytes,
// u8x16_* as unsigned bytes (distinct wasm instructions for lt/gt/le/ge).

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a == (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_ne(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a != (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a < (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u8x16)__a < (__u8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a > (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u8x16)__a > (__u8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a <= (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u8x16)__a <= (__u8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i8x16)__a >= (__i8x16)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u8x16)__a >= (__u8x16)__b);
}
716 | |
// Lane-wise i16x8 equality: each result lane is all ones (-1) where the lanes
// are equal, all zeros otherwise (i16x8.eq).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i16x8)__a == (__i16x8)__b);
}
721 | |
722 | static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_ne(v128_t __a, |
723 | v128_t __b) { |
724 | return (v128_t)((__u16x8)__a != (__u16x8)__b); |
725 | } |
726 | |
// Lane-wise 16-bit ordered comparisons: all-ones lane where the predicate
// holds, all-zeros otherwise. i16x8_* compare as signed, u16x8_* as unsigned.

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i16x8)__a < (__i16x8)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u16x8)__a < (__u16x8)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i16x8)__a > (__i16x8)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u16x8)__a > (__u16x8)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i16x8)__a <= (__i16x8)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u16x8)__a <= (__u16x8)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i16x8)__a >= (__i16x8)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u16x8)__a >= (__u16x8)__b);
}
766 | |
// Lane-wise 32-bit comparisons: all-ones lane where the predicate holds,
// all-zeros otherwise. i32x4_* compare as signed, u32x4_* as unsigned.

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a == (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_ne(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a != (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a < (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u32x4)__a < (__u32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a > (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u32x4)__a > (__u32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a <= (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u32x4)__a <= (__u32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i32x4)__a >= (__i32x4)__b);
}

static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__u32x4)__a >= (__u32x4)__b);
}
816 | |
// Lane-wise 64-bit integer comparisons; result lanes are all-ones / all-zeros
// masks. Only signed orderings exist at this width (no u64x2 variants here).

// Equality on 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a == (__i64x2)__b);
}

// Inequality on 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_ne(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a != (__i64x2)__b);
}

// Signed < on 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a < (__i64x2)__b);
}

// Signed > on 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a > (__i64x2)__b);
}

// Signed <= on 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a <= (__i64x2)__b);
}

// Signed >= on 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__i64x2)__a >= (__i64x2)__b);
}
846 | |
// Lane-wise single-precision float comparisons. Result lanes are 32-bit
// all-ones / all-zeros masks; IEEE semantics apply (comparisons involving
// NaN are false, except != which is true).

// == on f32 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a == (__f32x4)__b);
}

// != on f32 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ne(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a != (__f32x4)__b);
}

// < on f32 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a < (__f32x4)__b);
}

// > on f32 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a > (__f32x4)__b);
}

// <= on f32 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a <= (__f32x4)__b);
}

// >= on f32 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f32x4)__a >= (__f32x4)__b);
}
876 | |
// Lane-wise double-precision float comparisons. Result lanes are 64-bit
// all-ones / all-zeros masks; IEEE NaN rules apply as for f32x4.

// == on f64 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_eq(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a == (__f64x2)__b);
}

// != on f64 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ne(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a != (__f64x2)__b);
}

// < on f64 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_lt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a < (__f64x2)__b);
}

// > on f64 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_gt(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a > (__f64x2)__b);
}

// <= on f64 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_le(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a <= (__f64x2)__b);
}

// >= on f64 lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ge(v128_t __a,
                                                          v128_t __b) {
  return (v128_t)((__f64x2)__a >= (__f64x2)__b);
}
906 | |
// Full-width (128-bit) bitwise operations; lane interpretation is irrelevant.

// Bitwise complement of all 128 bits.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_not(v128_t __a) {
  return ~__a;
}

// Bitwise AND.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_and(v128_t __a,
                                                          v128_t __b) {
  return __a & __b;
}

// Bitwise OR.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_or(v128_t __a,
                                                         v128_t __b) {
  return __a | __b;
}

// Bitwise XOR.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_xor(v128_t __a,
                                                          v128_t __b) {
  return __a ^ __b;
}

// __a AND (NOT __b): note the complement applies to the SECOND operand,
// i.e. this clears the bits of __a that are set in __b.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_andnot(v128_t __a,
                                                             v128_t __b) {
  return __a & ~__b;
}

// True if any bit anywhere in the vector is set.
static __inline__ bool __DEFAULT_FN_ATTRS wasm_v128_any_true(v128_t __a) {
  return __builtin_wasm_any_true_v128((__i8x16)__a);
}

// Bitwise select: for each bit, take __a where the corresponding __mask bit
// is 1, otherwise take __b.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_v128_bitselect(v128_t __a,
                                                                v128_t __b,
                                                                v128_t __mask) {
  return (v128_t)__builtin_wasm_bitselect((__i32x4)__a, (__i32x4)__b,
                                          (__i32x4)__mask);
}
941 | |
// 8-bit (16-lane) integer operations. Wrapping add/sub/neg are done on the
// unsigned interpretation so lane overflow wraps modulo 2^8 instead of being
// signed-overflow UB; the resulting bit pattern is the same either way.

// Lane-wise absolute value (signed).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_abs(v128_t __a) {
  return (v128_t)__builtin_wasm_abs_i8x16((__i8x16)__a);
}

// Lane-wise wrapping negation.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_neg(v128_t __a) {
  return (v128_t)(-(__u8x16)__a);
}

// True only if every lane is non-zero.
static __inline__ bool __DEFAULT_FN_ATTRS wasm_i8x16_all_true(v128_t __a) {
  return __builtin_wasm_all_true_i8x16((__i8x16)__a);
}

// Packs the top (sign) bit of each of the 16 lanes into the low 16 bits of
// the result.
static __inline__ uint32_t __DEFAULT_FN_ATTRS wasm_i8x16_bitmask(v128_t __a) {
  return __builtin_wasm_bitmask_i8x16((__i8x16)__a);
}

// Lane-wise population count.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_popcnt(v128_t __a) {
  return (v128_t)__builtin_wasm_popcnt_i8x16((__i8x16)__a);
}

// Shift left; the count is taken modulo the lane width (& 0x7), matching
// wasm shift semantics and avoiding UB from oversized shifts.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_shl(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__i8x16)__a << (__b & 0x7));
}

// Arithmetic (sign-extending) shift right, count modulo 8.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_shr(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__i8x16)__a >> (__b & 0x7));
}

// Logical (zero-filling) shift right, count modulo 8.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_shr(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__u8x16)__a >> (__b & 0x7));
}

// Wrapping addition.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_add(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u8x16)__a + (__u8x16)__b);
}

// Signed saturating addition (clamps to [-128, 127]).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_add_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_add_sat_s_i8x16((__i8x16)__a, (__i8x16)__b);
}

// Unsigned saturating addition (clamps to [0, 255]).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_add_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_add_sat_u_i8x16((__u8x16)__a, (__u8x16)__b);
}

// Wrapping subtraction.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_sub(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u8x16)__a - (__u8x16)__b);
}

// Signed saturating subtraction.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_sub_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_sub_sat_s_i8x16((__i8x16)__a, (__i8x16)__b);
}

// Unsigned saturating subtraction (floors at 0).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_sub_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_sub_sat_u_i8x16((__u8x16)__a, (__u8x16)__b);
}

// Lane-wise signed minimum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_min(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_min_s_i8x16((__i8x16)__a, (__i8x16)__b);
}

// Lane-wise unsigned minimum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_min(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_min_u_i8x16((__u8x16)__a, (__u8x16)__b);
}

// Lane-wise signed maximum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_max(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_max_s_i8x16((__i8x16)__a, (__i8x16)__b);
}

// Lane-wise unsigned maximum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_max(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_max_u_i8x16((__u8x16)__a, (__u8x16)__b);
}

// Lane-wise unsigned rounding average: (a + b + 1) >> 1.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u8x16_avgr(v128_t __a,
                                                            v128_t __b) {
  return (v128_t)__builtin_wasm_avgr_u_i8x16((__u8x16)__a, (__u8x16)__b);
}
1031 | |
// 16-bit (8-lane) integer operations; same conventions as the i8x16 group.

// Lane-wise absolute value (signed).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_abs(v128_t __a) {
  return (v128_t)__builtin_wasm_abs_i16x8((__i16x8)__a);
}

// Lane-wise wrapping negation (unsigned cast avoids signed-overflow UB).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_neg(v128_t __a) {
  return (v128_t)(-(__u16x8)__a);
}

// True only if every lane is non-zero.
static __inline__ bool __DEFAULT_FN_ATTRS wasm_i16x8_all_true(v128_t __a) {
  return __builtin_wasm_all_true_i16x8((__i16x8)__a);
}

// Packs the sign bit of each of the 8 lanes into the low 8 bits.
static __inline__ uint32_t __DEFAULT_FN_ATTRS wasm_i16x8_bitmask(v128_t __a) {
  return __builtin_wasm_bitmask_i16x8((__i16x8)__a);
}

// Shift left; count taken modulo the 16-bit lane width (& 0xF).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_shl(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__i16x8)__a << (__b & 0xF));
}

// Arithmetic shift right, count modulo 16.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_shr(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__i16x8)__a >> (__b & 0xF));
}

// Logical shift right, count modulo 16.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_shr(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__u16x8)__a >> (__b & 0xF));
}

// Wrapping addition.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_add(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u16x8)__a + (__u16x8)__b);
}

// Signed saturating addition.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_add_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_add_sat_s_i16x8((__i16x8)__a, (__i16x8)__b);
}

// Unsigned saturating addition.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_add_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_add_sat_u_i16x8((__u16x8)__a, (__u16x8)__b);
}
1077 | |
1078 | static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_sub(v128_t __a, |
1079 | v128_t __b) { |
1080 | return (v128_t)((__i16x8)__a - (__i16x8)__b); |
1081 | } |
1082 | |
// Signed saturating 16-bit subtraction.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_sub_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_sub_sat_s_i16x8((__i16x8)__a, (__i16x8)__b);
}

// Unsigned saturating 16-bit subtraction (floors at 0).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_sub_sat(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_sub_sat_u_i16x8((__u16x8)__a, (__u16x8)__b);
}

// Wrapping 16-bit multiplication (low half of the product); unsigned cast
// avoids signed-overflow UB.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_mul(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u16x8)__a * (__u16x8)__b);
}

// Lane-wise signed minimum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_min(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_min_s_i16x8((__i16x8)__a, (__i16x8)__b);
}

// Lane-wise unsigned minimum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_min(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_min_u_i16x8((__u16x8)__a, (__u16x8)__b);
}

// Lane-wise signed maximum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_max(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_max_s_i16x8((__i16x8)__a, (__i16x8)__b);
}

// Lane-wise unsigned maximum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_max(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_max_u_i16x8((__u16x8)__a, (__u16x8)__b);
}

// Lane-wise unsigned rounding average: (a + b + 1) >> 1.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u16x8_avgr(v128_t __a,
                                                            v128_t __b) {
  return (v128_t)__builtin_wasm_avgr_u_i16x8((__u16x8)__a, (__u16x8)__b);
}
1122 | |
// 32-bit (4-lane) integer operations; same conventions as narrower lanes.

// Lane-wise absolute value (signed).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_abs(v128_t __a) {
  return (v128_t)__builtin_wasm_abs_i32x4((__i32x4)__a);
}

// Lane-wise wrapping negation.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_neg(v128_t __a) {
  return (v128_t)(-(__u32x4)__a);
}

// True only if every lane is non-zero.
static __inline__ bool __DEFAULT_FN_ATTRS wasm_i32x4_all_true(v128_t __a) {
  return __builtin_wasm_all_true_i32x4((__i32x4)__a);
}

// Packs the sign bit of each of the 4 lanes into the low 4 bits.
static __inline__ uint32_t __DEFAULT_FN_ATTRS wasm_i32x4_bitmask(v128_t __a) {
  return __builtin_wasm_bitmask_i32x4((__i32x4)__a);
}

// Shift left; count taken modulo the 32-bit lane width (& 0x1F).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_shl(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__i32x4)__a << (__b & 0x1F));
}

// Arithmetic shift right, count modulo 32.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_shr(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__i32x4)__a >> (__b & 0x1F));
}

// Logical shift right, count modulo 32.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_shr(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__u32x4)__a >> (__b & 0x1F));
}

// Wrapping addition.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_add(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u32x4)__a + (__u32x4)__b);
}

// Wrapping subtraction.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_sub(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u32x4)__a - (__u32x4)__b);
}

// Wrapping multiplication (low 32 bits of the product).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_mul(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u32x4)__a * (__u32x4)__b);
}

// Lane-wise signed minimum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_min(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_min_s_i32x4((__i32x4)__a, (__i32x4)__b);
}

// Lane-wise unsigned minimum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_min(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_min_u_i32x4((__u32x4)__a, (__u32x4)__b);
}

// Lane-wise signed maximum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_max(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_max_s_i32x4((__i32x4)__a, (__i32x4)__b);
}

// Lane-wise unsigned maximum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u32x4_max(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_max_u_i32x4((__u32x4)__a, (__u32x4)__b);
}

// Dot product: multiplies corresponding signed 16-bit lanes and adds
// adjacent 32-bit products, yielding four 32-bit sums.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i32x4_dot_i16x8(v128_t __a,
                                                                 v128_t __b) {
  return (v128_t)__builtin_wasm_dot_s_i32x4_i16x8((__i16x8)__a, (__i16x8)__b);
}
1193 | |
// 64-bit (2-lane) integer operations.

// Lane-wise absolute value (signed).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_abs(v128_t __a) {
  return (v128_t)__builtin_wasm_abs_i64x2((__i64x2)__a);
}

// Lane-wise wrapping negation.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_neg(v128_t __a) {
  return (v128_t)(-(__u64x2)__a);
}

// True only if both lanes are non-zero.
static __inline__ bool __DEFAULT_FN_ATTRS wasm_i64x2_all_true(v128_t __a) {
  return __builtin_wasm_all_true_i64x2((__i64x2)__a);
}

// Packs the sign bit of each of the 2 lanes into the low 2 bits.
static __inline__ uint32_t __DEFAULT_FN_ATTRS wasm_i64x2_bitmask(v128_t __a) {
  return __builtin_wasm_bitmask_i64x2((__i64x2)__a);
}

// Shift left; the count is widened to int64_t so the shift operand matches
// the 64-bit lane type, then taken modulo 64 (& 0x3F).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_shl(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__i64x2)__a << ((int64_t)__b & 0x3F));
}

// Arithmetic shift right, count modulo 64.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_shr(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__i64x2)__a >> ((int64_t)__b & 0x3F));
}

// Logical shift right, count modulo 64.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_u64x2_shr(v128_t __a,
                                                           uint32_t __b) {
  return (v128_t)((__u64x2)__a >> ((int64_t)__b & 0x3F));
}

// Wrapping addition.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_add(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u64x2)__a + (__u64x2)__b);
}

// Wrapping subtraction.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_sub(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u64x2)__a - (__u64x2)__b);
}

// Wrapping multiplication (low 64 bits of the product).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i64x2_mul(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__u64x2)__a * (__u64x2)__b);
}
1239 | |
// Single-precision (4-lane) floating-point operations.

// Lane-wise absolute value.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_abs(v128_t __a) {
  return (v128_t)__builtin_wasm_abs_f32x4((__f32x4)__a);
}

// Lane-wise negation (sign-bit flip).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_neg(v128_t __a) {
  return (v128_t)(-(__f32x4)__a);
}

// Lane-wise square root.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_sqrt(v128_t __a) {
  return (v128_t)__builtin_wasm_sqrt_f32x4((__f32x4)__a);
}

// Lane-wise round up to integer.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_ceil(v128_t __a) {
  return (v128_t)__builtin_wasm_ceil_f32x4((__f32x4)__a);
}

// Lane-wise round down to integer.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_floor(v128_t __a) {
  return (v128_t)__builtin_wasm_floor_f32x4((__f32x4)__a);
}

// Lane-wise round toward zero.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_trunc(v128_t __a) {
  return (v128_t)__builtin_wasm_trunc_f32x4((__f32x4)__a);
}

// Lane-wise round to nearest integer, ties to even.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_nearest(v128_t __a) {
  return (v128_t)__builtin_wasm_nearest_f32x4((__f32x4)__a);
}

// Lane-wise addition.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_add(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__f32x4)__a + (__f32x4)__b);
}

// Lane-wise subtraction.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_sub(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__f32x4)__a - (__f32x4)__b);
}

// Lane-wise multiplication.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_mul(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__f32x4)__a * (__f32x4)__b);
}

// Lane-wise division.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_div(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__f32x4)__a / (__f32x4)__b);
}

// Lane-wise NaN-propagating minimum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_min(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_min_f32x4((__f32x4)__a, (__f32x4)__b);
}

// Lane-wise NaN-propagating maximum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_max(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_max_f32x4((__f32x4)__a, (__f32x4)__b);
}

// Pseudo-minimum: b < a ? b : a (does not propagate NaN like min).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_pmin(v128_t __a,
                                                            v128_t __b) {
  return (v128_t)__builtin_wasm_pmin_f32x4((__f32x4)__a, (__f32x4)__b);
}

// Pseudo-maximum: a < b ? b : a.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f32x4_pmax(v128_t __a,
                                                            v128_t __b) {
  return (v128_t)__builtin_wasm_pmax_f32x4((__f32x4)__a, (__f32x4)__b);
}
1307 | |
// Double-precision (2-lane) floating-point operations; mirrors the f32x4 set.

// Lane-wise absolute value.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_abs(v128_t __a) {
  return (v128_t)__builtin_wasm_abs_f64x2((__f64x2)__a);
}

// Lane-wise negation.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_neg(v128_t __a) {
  return (v128_t)(-(__f64x2)__a);
}

// Lane-wise square root.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_sqrt(v128_t __a) {
  return (v128_t)__builtin_wasm_sqrt_f64x2((__f64x2)__a);
}

// Lane-wise round up to integer.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_ceil(v128_t __a) {
  return (v128_t)__builtin_wasm_ceil_f64x2((__f64x2)__a);
}

// Lane-wise round down to integer.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_floor(v128_t __a) {
  return (v128_t)__builtin_wasm_floor_f64x2((__f64x2)__a);
}

// Lane-wise round toward zero.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_trunc(v128_t __a) {
  return (v128_t)__builtin_wasm_trunc_f64x2((__f64x2)__a);
}

// Lane-wise round to nearest integer, ties to even.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_nearest(v128_t __a) {
  return (v128_t)__builtin_wasm_nearest_f64x2((__f64x2)__a);
}

// Lane-wise addition.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_add(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__f64x2)__a + (__f64x2)__b);
}

// Lane-wise subtraction.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_sub(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__f64x2)__a - (__f64x2)__b);
}

// Lane-wise multiplication.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_mul(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__f64x2)__a * (__f64x2)__b);
}

// Lane-wise division.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_div(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)((__f64x2)__a / (__f64x2)__b);
}

// Lane-wise NaN-propagating minimum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_min(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_min_f64x2((__f64x2)__a, (__f64x2)__b);
}

// Lane-wise NaN-propagating maximum.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_max(v128_t __a,
                                                           v128_t __b) {
  return (v128_t)__builtin_wasm_max_f64x2((__f64x2)__a, (__f64x2)__b);
}

// Pseudo-minimum: b < a ? b : a.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_pmin(v128_t __a,
                                                            v128_t __b) {
  return (v128_t)__builtin_wasm_pmin_f64x2((__f64x2)__a, (__f64x2)__b);
}

// Pseudo-maximum: a < b ? b : a.
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_f64x2_pmax(v128_t __a,
                                                            v128_t __b) {
  return (v128_t)__builtin_wasm_pmax_f64x2((__f64x2)__a, (__f64x2)__b);
}
1375 | |
// Conversions between integer and floating-point lane types.

// f32 -> i32, rounding toward zero, saturating at INT32_MIN/MAX; NaN -> 0.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_trunc_sat_f32x4(v128_t __a) {
  return (v128_t)__builtin_wasm_trunc_saturate_s_i32x4_f32x4((__f32x4)__a);
}

// f32 -> u32, rounding toward zero, saturating at 0/UINT32_MAX; NaN -> 0.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_trunc_sat_f32x4(v128_t __a) {
  return (v128_t)__builtin_wasm_trunc_saturate_u_i32x4_f32x4((__f32x4)__a);
}

// Signed i32 -> f32 conversion on each lane.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_convert_i32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector((__i32x4)__a, __f32x4);
}

// Unsigned u32 -> f32 conversion on each lane.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_convert_u32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector((__u32x4)__a, __f32x4);
}

// Converts the two low signed 32-bit lanes to f64 (high lanes discarded).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_convert_low_i32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector((__i32x2){__a[0], __a[1]}, __f64x2);
}

// Converts the two low unsigned 32-bit lanes to f64.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_convert_low_u32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector((__u32x2){__a[0], __a[1]}, __f64x2);
}

// f64 -> i32 saturating truncation into the two low lanes; high lanes zero.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_trunc_sat_f64x2_zero(v128_t __a) {
  return (v128_t)__builtin_wasm_trunc_sat_s_zero_f64x2_i32x4((__f64x2)__a);
}

// f64 -> u32 saturating truncation into the two low lanes; high lanes zero.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_trunc_sat_f64x2_zero(v128_t __a) {
  return (v128_t)__builtin_wasm_trunc_sat_u_zero_f64x2_i32x4((__f64x2)__a);
}

// Narrows the two f64 lanes to f32 in the low lanes; the high two f32 lanes
// are zero (the shuffle appends a zero f64x2 before converting).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_demote_f64x2_zero(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      __builtin_shufflevector((__f64x2)__a, (__f64x2){0, 0}, 0, 1, 2, 3),
      __f32x4);
}

// Widens the two low f32 lanes to f64 (high lanes discarded).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_promote_low_f32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__f32x2){((__f32x4)__a)[0], ((__f32x4)__a)[1]}, __f64x2);
}
1428 | |
// Compile-time shuffles. The lane indices (__c0...) must be integer constant
// expressions selecting bytes from the 32-byte concatenation of __a and __b
// (0-15 pick from __a, 16-31 from __b). These are macros, not functions,
// because the builtin requires constant indices. The wider-lane variants
// expand each lane index into its constituent byte indices.

// Select 16 arbitrary bytes from the pair (__a, __b).
#define wasm_i8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7, __c8, __c9, __c10, __c11, __c12, __c13,       \
                           __c14, __c15)                                       \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), __c0, __c1, __c2, __c3, __c4, __c5,      \
      __c6, __c7, __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15))

// Select 8 16-bit lanes; indices 0-7 from __a, 8-15 from __b.
#define wasm_i16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7)                                               \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*2, (__c0)*2 + 1, (__c1)*2,        \
      (__c1)*2 + 1, (__c2)*2, (__c2)*2 + 1, (__c3)*2, (__c3)*2 + 1, (__c4)*2,  \
      (__c4)*2 + 1, (__c5)*2, (__c5)*2 + 1, (__c6)*2, (__c6)*2 + 1, (__c7)*2,  \
      (__c7)*2 + 1))

// Select 4 32-bit lanes; indices 0-3 from __a, 4-7 from __b.
#define wasm_i32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)                   \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*4, (__c0)*4 + 1, (__c0)*4 + 2,    \
      (__c0)*4 + 3, (__c1)*4, (__c1)*4 + 1, (__c1)*4 + 2, (__c1)*4 + 3,        \
      (__c2)*4, (__c2)*4 + 1, (__c2)*4 + 2, (__c2)*4 + 3, (__c3)*4,            \
      (__c3)*4 + 1, (__c3)*4 + 2, (__c3)*4 + 3))

// Select 2 64-bit lanes; indices 0-1 from __a, 2-3 from __b.
#define wasm_i64x2_shuffle(__a, __b, __c0, __c1)                               \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*8, (__c0)*8 + 1, (__c0)*8 + 2,    \
      (__c0)*8 + 3, (__c0)*8 + 4, (__c0)*8 + 5, (__c0)*8 + 6, (__c0)*8 + 7,    \
      (__c1)*8, (__c1)*8 + 1, (__c1)*8 + 2, (__c1)*8 + 3, (__c1)*8 + 4,        \
      (__c1)*8 + 5, (__c1)*8 + 6, (__c1)*8 + 7))
1457 | |
// Runtime byte select: each byte of __b indexes into the bytes of __a
// (indices taken at runtime, unlike the constant-index shuffle macros).
static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i8x16_swizzle(v128_t __a,
                                                               v128_t __b) {
  return (v128_t)__builtin_wasm_swizzle_i8x16((__i8x16)__a, (__i8x16)__b);
}

// Narrows 16-bit lanes of (__a, __b) to 8-bit lanes with signed saturation;
// __a fills the low 8 lanes, __b the high 8.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_narrow_i16x8(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_s_i8x16_i16x8((__i16x8)__a,
                                                     (__i16x8)__b);
}

// As above, but with unsigned saturation (clamped to [0, 255]).
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_narrow_i16x8(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_u_i8x16_i16x8((__i16x8)__a,
                                                     (__i16x8)__b);
}

// Narrows 32-bit lanes to 16-bit lanes with signed saturation.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_narrow_i32x4(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_s_i16x8_i32x4((__i32x4)__a,
                                                     (__i32x4)__b);
}

// Narrows 32-bit lanes to 16-bit lanes with unsigned saturation.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_narrow_i32x4(v128_t __a, v128_t __b) {
  return (v128_t)__builtin_wasm_narrow_u_i16x8_i32x4((__i32x4)__a,
                                                     (__i32x4)__b);
}
1486 | |
// Widening conversions: build a half-width vector from the chosen 8 lanes,
// then let __builtin_convertvector sign/zero-extend each lane to 16 bits.

// Sign-extends the low 8 signed 8-bit lanes to 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extend_low_i8x16(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i8x8){((__i8x16)__a)[0], ((__i8x16)__a)[1], ((__i8x16)__a)[2],
               ((__i8x16)__a)[3], ((__i8x16)__a)[4], ((__i8x16)__a)[5],
               ((__i8x16)__a)[6], ((__i8x16)__a)[7]},
      __i16x8);
}

// Sign-extends the high 8 signed 8-bit lanes to 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extend_high_i8x16(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i8x8){((__i8x16)__a)[8], ((__i8x16)__a)[9], ((__i8x16)__a)[10],
               ((__i8x16)__a)[11], ((__i8x16)__a)[12], ((__i8x16)__a)[13],
               ((__i8x16)__a)[14], ((__i8x16)__a)[15]},
      __i16x8);
}

// Zero-extends the low 8 unsigned 8-bit lanes to 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extend_low_u8x16(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u8x8){((__u8x16)__a)[0], ((__u8x16)__a)[1], ((__u8x16)__a)[2],
               ((__u8x16)__a)[3], ((__u8x16)__a)[4], ((__u8x16)__a)[5],
               ((__u8x16)__a)[6], ((__u8x16)__a)[7]},
      __u16x8);
}

// Zero-extends the high 8 unsigned 8-bit lanes to 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extend_high_u8x16(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u8x8){((__u8x16)__a)[8], ((__u8x16)__a)[9], ((__u8x16)__a)[10],
               ((__u8x16)__a)[11], ((__u8x16)__a)[12], ((__u8x16)__a)[13],
               ((__u8x16)__a)[14], ((__u8x16)__a)[15]},
      __u16x8);
}
1522 | |
// 16->32 and 32->64 widening conversions, same technique as the 8->16 set.

// Sign-extends the low 4 signed 16-bit lanes to 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extend_low_i16x8(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i16x4){((__i16x8)__a)[0], ((__i16x8)__a)[1], ((__i16x8)__a)[2],
                ((__i16x8)__a)[3]},
      __i32x4);
}

// Sign-extends the high 4 signed 16-bit lanes to 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extend_high_i16x8(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i16x4){((__i16x8)__a)[4], ((__i16x8)__a)[5], ((__i16x8)__a)[6],
                ((__i16x8)__a)[7]},
      __i32x4);
}

// Zero-extends the low 4 unsigned 16-bit lanes to 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extend_low_u16x8(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u16x4){((__u16x8)__a)[0], ((__u16x8)__a)[1], ((__u16x8)__a)[2],
                ((__u16x8)__a)[3]},
      __u32x4);
}

// Zero-extends the high 4 unsigned 16-bit lanes to 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extend_high_u16x8(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u16x4){((__u16x8)__a)[4], ((__u16x8)__a)[5], ((__u16x8)__a)[6],
                ((__u16x8)__a)[7]},
      __u32x4);
}

// Sign-extends the low 2 signed 32-bit lanes to 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extend_low_i32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i32x2){((__i32x4)__a)[0], ((__i32x4)__a)[1]}, __i64x2);
}

// Sign-extends the high 2 signed 32-bit lanes to 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extend_high_i32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__i32x2){((__i32x4)__a)[2], ((__i32x4)__a)[3]}, __i64x2);
}

// Zero-extends the low 2 unsigned 32-bit lanes to 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extend_low_u32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u32x2){((__u32x4)__a)[0], ((__u32x4)__a)[1]}, __u64x2);
}

// Zero-extends the high 2 unsigned 32-bit lanes to 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extend_high_u32x4(v128_t __a) {
  return (v128_t) __builtin_convertvector(
      (__u32x2){((__u32x4)__a)[2], ((__u32x4)__a)[3]}, __u64x2);
}
1578 | |
// Pairwise widening additions: each result lane is the sum of two adjacent
// narrower lanes, extended to the wider lane type.

// Sums adjacent pairs of signed 8-bit lanes into 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extadd_pairwise_i8x16(v128_t __a) {
  return (v128_t)__builtin_wasm_extadd_pairwise_i8x16_s_i16x8((__i8x16)__a);
}

// Sums adjacent pairs of unsigned 8-bit lanes into 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extadd_pairwise_u8x16(v128_t __a) {
  return (v128_t)__builtin_wasm_extadd_pairwise_i8x16_u_i16x8((__u8x16)__a);
}

// Sums adjacent pairs of signed 16-bit lanes into 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extadd_pairwise_i16x8(v128_t __a) {
  return (v128_t)__builtin_wasm_extadd_pairwise_i16x8_s_i32x4((__i16x8)__a);
}

// Sums adjacent pairs of unsigned 16-bit lanes into 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extadd_pairwise_u16x8(v128_t __a) {
  return (v128_t)__builtin_wasm_extadd_pairwise_i16x8_u_i32x4((__u16x8)__a);
}
1598 | |
// Extended (widening) multiplications: first widen the chosen half of each
// operand via the extend helpers above, then multiply at the wider width so
// the full product fits without overflow.

// Widening product of the low signed 8-bit lanes -> 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extmul_low_i8x16(v128_t __a, v128_t __b) {
  return (v128_t)((__i16x8)wasm_i16x8_extend_low_i8x16(__a) *
                  (__i16x8)wasm_i16x8_extend_low_i8x16(__b));
}

// Widening product of the high signed 8-bit lanes -> 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_extmul_high_i8x16(v128_t __a, v128_t __b) {
  return (v128_t)((__i16x8)wasm_i16x8_extend_high_i8x16(__a) *
                  (__i16x8)wasm_i16x8_extend_high_i8x16(__b));
}

// Widening product of the low unsigned 8-bit lanes -> 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extmul_low_u8x16(v128_t __a, v128_t __b) {
  return (v128_t)((__u16x8)wasm_u16x8_extend_low_u8x16(__a) *
                  (__u16x8)wasm_u16x8_extend_low_u8x16(__b));
}

// Widening product of the high unsigned 8-bit lanes -> 16-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_extmul_high_u8x16(v128_t __a, v128_t __b) {
  return (v128_t)((__u16x8)wasm_u16x8_extend_high_u8x16(__a) *
                  (__u16x8)wasm_u16x8_extend_high_u8x16(__b));
}

// Widening product of the low signed 16-bit lanes -> 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extmul_low_i16x8(v128_t __a, v128_t __b) {
  return (v128_t)((__i32x4)wasm_i32x4_extend_low_i16x8(__a) *
                  (__i32x4)wasm_i32x4_extend_low_i16x8(__b));
}

// Widening product of the high signed 16-bit lanes -> 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_extmul_high_i16x8(v128_t __a, v128_t __b) {
  return (v128_t)((__i32x4)wasm_i32x4_extend_high_i16x8(__a) *
                  (__i32x4)wasm_i32x4_extend_high_i16x8(__b));
}

// Widening product of the low unsigned 16-bit lanes -> 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extmul_low_u16x8(v128_t __a, v128_t __b) {
  return (v128_t)((__u32x4)wasm_u32x4_extend_low_u16x8(__a) *
                  (__u32x4)wasm_u32x4_extend_low_u16x8(__b));
}

// Widening product of the high unsigned 16-bit lanes -> 32-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_extmul_high_u16x8(v128_t __a, v128_t __b) {
  return (v128_t)((__u32x4)wasm_u32x4_extend_high_u16x8(__a) *
                  (__u32x4)wasm_u32x4_extend_high_u16x8(__b));
}

// Widening product of the low signed 32-bit lanes -> 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extmul_low_i32x4(v128_t __a, v128_t __b) {
  return (v128_t)((__i64x2)wasm_i64x2_extend_low_i32x4(__a) *
                  (__i64x2)wasm_i64x2_extend_low_i32x4(__b));
}

// Widening product of the high signed 32-bit lanes -> 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_extmul_high_i32x4(v128_t __a, v128_t __b) {
  return (v128_t)((__i64x2)wasm_i64x2_extend_high_i32x4(__a) *
                  (__i64x2)wasm_i64x2_extend_high_i32x4(__b));
}

// Widening product of the low unsigned 32-bit lanes -> 64-bit lanes.
static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_extmul_low_u32x4(v128_t __a, v128_t __b) {
  return (v128_t)((__u64x2)wasm_u64x2_extend_low_u32x4(__a) *
                  (__u64x2)wasm_u64x2_extend_low_u32x4(__b));
}
1664 | |
1665 | static __inline__ v128_t __DEFAULT_FN_ATTRS |
1666 | wasm_u64x2_extmul_high_u32x4(v128_t __a, v128_t __b) { |
1667 | return (v128_t)((__u64x2)wasm_u64x2_extend_high_u32x4(__a) * |
1668 | (__u64x2)wasm_u64x2_extend_high_u32x4(__b)); |
1669 | } |
1670 | |
1671 | static __inline__ v128_t __DEFAULT_FN_ATTRS wasm_i16x8_q15mulr_sat(v128_t __a, |
1672 | v128_t __b) { |
1673 | return (v128_t)__builtin_wasm_q15mulr_sat_s_i16x8((__i16x8)__a, (__i16x8)__b); |
1674 | } |
1675 | |
// Old intrinsic names supported to ease transitioning to the standard names. Do
// not use these; they will be removed in the near future.

// Attributes for the deprecated wrapper functions below: the default
// attributes plus a deprecation diagnostic naming the replacement intrinsic
// (the second argument is clang's fix-it replacement hint).
#define __DEPRECATED_FN_ATTRS(__replacement)                                   \
  __DEFAULT_FN_ATTRS __attribute__(                                            \
      (deprecated("use " __replacement " instead", __replacement)))

// Stringization helper used to build the _Pragma operand below.
#define __WASM_STR(X) #X

// Emits a deprecation warning at the expansion site of a deprecated macro.
// Only active when __DEPRECATED is defined (i.e. the user asked for
// deprecation diagnostics); otherwise it expands to nothing.
#ifdef __DEPRECATED
#define __DEPRECATED_WASM_MACRO(__name, __replacement)                         \
  _Pragma(__WASM_STR(GCC warning(                                              \
      "'" __name "' is deprecated: use '" __replacement "' instead")))
#else
#define __DEPRECATED_WASM_MACRO(__name, __replacement)
#endif
1692 | |
1693 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load8_splat") |
1694 | wasm_v8x16_load_splat(const void *__mem) { |
1695 | return wasm_v128_load8_splat(__mem); |
1696 | } |
1697 | |
1698 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load16_splat") |
1699 | wasm_v16x8_load_splat(const void *__mem) { |
1700 | return wasm_v128_load16_splat(__mem); |
1701 | } |
1702 | |
1703 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load32_splat") |
1704 | wasm_v32x4_load_splat(const void *__mem) { |
1705 | return wasm_v128_load32_splat(__mem); |
1706 | } |
1707 | |
1708 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_v128_load64_splat") |
1709 | wasm_v64x2_load_splat(const void *__mem) { |
1710 | return wasm_v128_load64_splat(__mem); |
1711 | } |
1712 | |
1713 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_load8x8") |
1714 | wasm_i16x8_load_8x8(const void *__mem) { |
1715 | return wasm_i16x8_load8x8(__mem); |
1716 | } |
1717 | |
1718 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_load8x8") |
1719 | wasm_u16x8_load_8x8(const void *__mem) { |
1720 | return wasm_u16x8_load8x8(__mem); |
1721 | } |
1722 | |
1723 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_load16x4") |
1724 | wasm_i32x4_load_16x4(const void *__mem) { |
1725 | return wasm_i32x4_load16x4(__mem); |
1726 | } |
1727 | |
1728 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_load16x4") |
1729 | wasm_u32x4_load_16x4(const void *__mem) { |
1730 | return wasm_u32x4_load16x4(__mem); |
1731 | } |
1732 | |
1733 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i64x2_load32x2") |
1734 | wasm_i64x2_load_32x2(const void *__mem) { |
1735 | return wasm_i64x2_load32x2(__mem); |
1736 | } |
1737 | |
1738 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u64x2_load32x2") |
1739 | wasm_u64x2_load_32x2(const void *__mem) { |
1740 | return wasm_u64x2_load32x2(__mem); |
1741 | } |
1742 | |
// Deprecated macro spellings of the standardized *_shuffle intrinsics. Each
// expands a deprecation warning (when __DEPRECATED is defined) followed by
// the replacement intrinsic with the same arguments. These remain macros so
// the lane indices stay integer constant expressions.
#define wasm_v8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7, __c8, __c9, __c10, __c11, __c12, __c13,       \
                           __c14, __c15)                                       \
  __DEPRECATED_WASM_MACRO("wasm_v8x16_shuffle", "wasm_i8x16_shuffle")          \
  wasm_i8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7, \
                     __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15)

#define wasm_v16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7)                                               \
  __DEPRECATED_WASM_MACRO("wasm_v16x8_shuffle", "wasm_i16x8_shuffle")          \
  wasm_i16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7)

#define wasm_v32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)                   \
  __DEPRECATED_WASM_MACRO("wasm_v32x4_shuffle", "wasm_i32x4_shuffle")          \
  wasm_i32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)

#define wasm_v64x2_shuffle(__a, __b, __c0, __c1)                               \
  __DEPRECATED_WASM_MACRO("wasm_v64x2_shuffle", "wasm_i64x2_shuffle")          \
  wasm_i64x2_shuffle(__a, __b, __c0, __c1)
1762 | |
// Relaxed SIMD intrinsics

// Attributes for relaxed-SIMD intrinsics: same shape as __DEFAULT_FN_ATTRS
// (defined earlier in this header) but requiring the "relaxed-simd" target
// feature instead of plain "simd128".
#define __RELAXED_FN_ATTRS                                                     \
  __attribute__((__always_inline__, __nodebug__, __target__("relaxed-simd"),   \
                 __min_vector_width__(128)))
1768 | |
1769 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1770 | wasm_f32x4_relaxed_madd(v128_t __a, v128_t __b, v128_t __c) { |
1771 | return (v128_t)__builtin_wasm_relaxed_madd_f32x4((__f32x4)__a, (__f32x4)__b, |
1772 | (__f32x4)__c); |
1773 | } |
1774 | |
1775 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1776 | wasm_f32x4_relaxed_nmadd(v128_t __a, v128_t __b, v128_t __c) { |
1777 | return (v128_t)__builtin_wasm_relaxed_nmadd_f32x4((__f32x4)__a, (__f32x4)__b, |
1778 | (__f32x4)__c); |
1779 | } |
1780 | |
1781 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1782 | wasm_f64x2_relaxed_madd(v128_t __a, v128_t __b, v128_t __c) { |
1783 | return (v128_t)__builtin_wasm_relaxed_madd_f64x2((__f64x2)__a, (__f64x2)__b, |
1784 | (__f64x2)__c); |
1785 | } |
1786 | |
1787 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1788 | wasm_f64x2_relaxed_nmadd(v128_t __a, v128_t __b, v128_t __c) { |
1789 | return (v128_t)__builtin_wasm_relaxed_nmadd_f64x2((__f64x2)__a, (__f64x2)__b, |
1790 | (__f64x2)__c); |
1791 | } |
1792 | |
1793 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1794 | wasm_i8x16_relaxed_laneselect(v128_t __a, v128_t __b, v128_t __m) { |
1795 | return (v128_t)__builtin_wasm_relaxed_laneselect_i8x16( |
1796 | (__i8x16)__a, (__i8x16)__b, (__i8x16)__m); |
1797 | } |
1798 | |
1799 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1800 | wasm_i16x8_relaxed_laneselect(v128_t __a, v128_t __b, v128_t __m) { |
1801 | return (v128_t)__builtin_wasm_relaxed_laneselect_i16x8( |
1802 | (__i16x8)__a, (__i16x8)__b, (__i16x8)__m); |
1803 | } |
1804 | |
1805 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1806 | wasm_i32x4_relaxed_laneselect(v128_t __a, v128_t __b, v128_t __m) { |
1807 | return (v128_t)__builtin_wasm_relaxed_laneselect_i32x4( |
1808 | (__i32x4)__a, (__i32x4)__b, (__i32x4)__m); |
1809 | } |
1810 | |
1811 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1812 | wasm_i64x2_relaxed_laneselect(v128_t __a, v128_t __b, v128_t __m) { |
1813 | return (v128_t)__builtin_wasm_relaxed_laneselect_i64x2( |
1814 | (__i64x2)__a, (__i64x2)__b, (__i64x2)__m); |
1815 | } |
1816 | |
1817 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1818 | wasm_i8x16_relaxed_swizzle(v128_t __a, v128_t __s) { |
1819 | return (v128_t)__builtin_wasm_relaxed_swizzle_i8x16((__i8x16)__a, |
1820 | (__i8x16)__s); |
1821 | } |
1822 | |
1823 | static __inline__ v128_t __RELAXED_FN_ATTRS wasm_f32x4_relaxed_min(v128_t __a, |
1824 | v128_t __b) { |
1825 | return (v128_t)__builtin_wasm_relaxed_min_f32x4((__f32x4)__a, (__f32x4)__b); |
1826 | } |
1827 | |
1828 | static __inline__ v128_t __RELAXED_FN_ATTRS wasm_f32x4_relaxed_max(v128_t __a, |
1829 | v128_t __b) { |
1830 | return (v128_t)__builtin_wasm_relaxed_max_f32x4((__f32x4)__a, (__f32x4)__b); |
1831 | } |
1832 | |
1833 | static __inline__ v128_t __RELAXED_FN_ATTRS wasm_f64x2_relaxed_min(v128_t __a, |
1834 | v128_t __b) { |
1835 | return (v128_t)__builtin_wasm_relaxed_min_f64x2((__f64x2)__a, (__f64x2)__b); |
1836 | } |
1837 | |
1838 | static __inline__ v128_t __RELAXED_FN_ATTRS wasm_f64x2_relaxed_max(v128_t __a, |
1839 | v128_t __b) { |
1840 | return (v128_t)__builtin_wasm_relaxed_max_f64x2((__f64x2)__a, (__f64x2)__b); |
1841 | } |
1842 | |
1843 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1844 | wasm_i32x4_relaxed_trunc_f32x4(v128_t __a) { |
1845 | return (v128_t)__builtin_wasm_relaxed_trunc_s_i32x4_f32x4((__f32x4)__a); |
1846 | } |
1847 | |
1848 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1849 | wasm_u32x4_relaxed_trunc_f32x4(v128_t __a) { |
1850 | return (v128_t)__builtin_wasm_relaxed_trunc_u_i32x4_f32x4((__f32x4)__a); |
1851 | } |
1852 | |
1853 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1854 | wasm_i32x4_relaxed_trunc_f64x2_zero(v128_t __a) { |
1855 | return (v128_t)__builtin_wasm_relaxed_trunc_s_zero_i32x4_f64x2((__f64x2)__a); |
1856 | } |
1857 | |
1858 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1859 | wasm_u32x4_relaxed_trunc_f64x2_zero(v128_t __a) { |
1860 | return (v128_t)__builtin_wasm_relaxed_trunc_u_zero_i32x4_f64x2((__f64x2)__a); |
1861 | } |
1862 | |
1863 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1864 | wasm_i16x8_relaxed_q15mulr(v128_t __a, v128_t __b) { |
1865 | return (v128_t)__builtin_wasm_relaxed_q15mulr_s_i16x8((__i16x8)__a, |
1866 | (__i16x8)__b); |
1867 | } |
1868 | |
1869 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1870 | wasm_i16x8_relaxed_dot_i8x16_i7x16(v128_t __a, v128_t __b) { |
1871 | return (v128_t)__builtin_wasm_relaxed_dot_i8x16_i7x16_s_i16x8((__i8x16)__a, |
1872 | (__i8x16)__b); |
1873 | } |
1874 | |
1875 | static __inline__ v128_t __RELAXED_FN_ATTRS |
1876 | wasm_i32x4_relaxed_dot_i8x16_i7x16_add(v128_t __a, v128_t __b, v128_t __c) { |
1877 | return (v128_t)__builtin_wasm_relaxed_dot_i8x16_i7x16_add_s_i32x4( |
1878 | (__i8x16)__a, (__i8x16)__b, (__i32x4)__c); |
1879 | } |
1880 | |
1881 | // Deprecated intrinsics |
1882 | |
1883 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i8x16_swizzle") |
1884 | wasm_v8x16_swizzle(v128_t __a, v128_t __b) { |
1885 | return wasm_i8x16_swizzle(__a, __b); |
1886 | } |
1887 | |
1888 | static __inline__ bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true") |
1889 | wasm_i8x16_any_true(v128_t __a) { |
1890 | return wasm_v128_any_true(__a); |
1891 | } |
1892 | |
1893 | static __inline__ bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true") |
1894 | wasm_i16x8_any_true(v128_t __a) { |
1895 | return wasm_v128_any_true(__a); |
1896 | } |
1897 | |
1898 | static __inline__ bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true") |
1899 | wasm_i32x4_any_true(v128_t __a) { |
1900 | return wasm_v128_any_true(__a); |
1901 | } |
1902 | |
1903 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i8x16_add_sat") |
1904 | wasm_i8x16_add_saturate(v128_t __a, v128_t __b) { |
1905 | return wasm_i8x16_add_sat(__a, __b); |
1906 | } |
1907 | |
1908 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u8x16_add_sat") |
1909 | wasm_u8x16_add_saturate(v128_t __a, v128_t __b) { |
1910 | return wasm_u8x16_add_sat(__a, __b); |
1911 | } |
1912 | |
1913 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i8x16_sub_sat") |
1914 | wasm_i8x16_sub_saturate(v128_t __a, v128_t __b) { |
1915 | return wasm_i8x16_sub_sat(__a, __b); |
1916 | } |
1917 | |
1918 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u8x16_sub_sat") |
1919 | wasm_u8x16_sub_saturate(v128_t __a, v128_t __b) { |
1920 | return wasm_u8x16_sub_sat(__a, __b); |
1921 | } |
1922 | |
1923 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_add_sat") |
1924 | wasm_i16x8_add_saturate(v128_t __a, v128_t __b) { |
1925 | return wasm_i16x8_add_sat(__a, __b); |
1926 | } |
1927 | |
1928 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_add_sat") |
1929 | wasm_u16x8_add_saturate(v128_t __a, v128_t __b) { |
1930 | return wasm_u16x8_add_sat(__a, __b); |
1931 | } |
1932 | |
1933 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_sub_sat") |
1934 | wasm_i16x8_sub_saturate(v128_t __a, v128_t __b) { |
1935 | return wasm_i16x8_sub_sat(__a, __b); |
1936 | } |
1937 | |
1938 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_sub_sat") |
1939 | wasm_u16x8_sub_saturate(v128_t __a, v128_t __b) { |
1940 | return wasm_u16x8_sub_sat(__a, __b); |
1941 | } |
1942 | |
1943 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_extend_low_i8x16") |
1944 | wasm_i16x8_widen_low_i8x16(v128_t __a) { |
1945 | return wasm_i16x8_extend_low_i8x16(__a); |
1946 | } |
1947 | |
1948 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i16x8_extend_high_i8x16") |
1949 | wasm_i16x8_widen_high_i8x16(v128_t __a) { |
1950 | return wasm_i16x8_extend_high_i8x16(__a); |
1951 | } |
1952 | |
1953 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_extend_low_u8x16") |
1954 | wasm_i16x8_widen_low_u8x16(v128_t __a) { |
1955 | return wasm_u16x8_extend_low_u8x16(__a); |
1956 | } |
1957 | |
1958 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u16x8_extend_high_u8x16") |
1959 | wasm_i16x8_widen_high_u8x16(v128_t __a) { |
1960 | return wasm_u16x8_extend_high_u8x16(__a); |
1961 | } |
1962 | |
1963 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_extend_low_i16x8") |
1964 | wasm_i32x4_widen_low_i16x8(v128_t __a) { |
1965 | return wasm_i32x4_extend_low_i16x8(__a); |
1966 | } |
1967 | |
1968 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_extend_high_i16x8") |
1969 | wasm_i32x4_widen_high_i16x8(v128_t __a) { |
1970 | return wasm_i32x4_extend_high_i16x8(__a); |
1971 | } |
1972 | |
1973 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_extend_low_u16x8") |
1974 | wasm_i32x4_widen_low_u16x8(v128_t __a) { |
1975 | return wasm_u32x4_extend_low_u16x8(__a); |
1976 | } |
1977 | |
1978 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_extend_high_u16x8") |
1979 | wasm_i32x4_widen_high_u16x8(v128_t __a) { |
1980 | return wasm_u32x4_extend_high_u16x8(__a); |
1981 | } |
1982 | |
1983 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_i32x4_trunc_sat_f32x4") |
1984 | wasm_i32x4_trunc_saturate_f32x4(v128_t __a) { |
1985 | return wasm_i32x4_trunc_sat_f32x4(__a); |
1986 | } |
1987 | |
1988 | static __inline__ v128_t __DEPRECATED_FN_ATTRS("wasm_u32x4_trunc_sat_f32x4") |
1989 | wasm_u32x4_trunc_saturate_f32x4(v128_t __a) { |
1990 | return wasm_u32x4_trunc_sat_f32x4(__a); |
1991 | } |
1992 | |
1993 | // Undefine helper macros |
1994 | #undef __DEFAULT_FN_ATTRS |
1995 | #undef __DEPRECATED_FN_ATTRS |
1996 | |
1997 | #endif // __WASM_SIMD128_H |
1998 |
Warning: This file is not a C or C++ file. It does not have highlighting.